1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
64 #define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
67 #define min(A,B) ((A) < (B) ? (A) : (B))
68 #define max(A,B) ((A) > (B) ? (A) : (B))
70 /* Structure used to define the rs6000 stack */
71 typedef struct rs6000_stack {
72 int first_gp_reg_save; /* first callee saved GP register used */
73 int first_fp_reg_save; /* first callee saved FP register used */
74 int first_altivec_reg_save; /* first callee saved AltiVec register used */
75 int lr_save_p; /* true if the link reg needs to be saved */
76 int cr_save_p; /* true if the CR reg needs to be saved */
77 unsigned int vrsave_mask; /* mask of vec registers to save */
78 int toc_save_p; /* true if the TOC needs to be saved */
79 int push_p; /* true if we need to allocate stack space */
80 int calls_p; /* true if the function makes any calls */
81 enum rs6000_abi abi; /* which ABI to use */
82 int gp_save_offset; /* offset to save GP regs from initial SP */
83 int fp_save_offset; /* offset to save FP regs from initial SP */
84 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
85 int lr_save_offset; /* offset to save LR from initial SP */
86 int cr_save_offset; /* offset to save CR from initial SP */
87 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
88 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
89 int toc_save_offset; /* offset to save the TOC pointer */
90 int varargs_save_offset; /* offset to save the varargs registers */
91 int ehrd_offset; /* offset to EH return data */
92 int reg_size; /* register size (4 or 8) */
93 int varargs_size; /* size to hold V.4 args passed in regs */
94 HOST_WIDE_INT vars_size; /* variable save area size */
95 int parm_size; /* outgoing parameter size */
96 int save_size; /* save area size */
97 int fixed_size; /* fixed size of stack frame */
98 int gp_size; /* size of saved GP registers */
99 int fp_size; /* size of saved FP registers */
100 int altivec_size; /* size of saved AltiVec registers */
101 int cr_size; /* size to hold CR if not in save_size */
102 int lr_size; /* size to hold LR if not in save_size */
103 int vrsave_size; /* size to hold VRSAVE if not in save_size */
104 int altivec_padding_size; /* size of altivec alignment padding if
106 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
107 int spe_padding_size;
108 int toc_size; /* size to hold TOC if not in save_size */
109 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
110 int spe_64bit_regs_used;
113 /* Target cpu type */
115 enum processor_type rs6000_cpu;
116 struct rs6000_cpu_select rs6000_select[3] =
118 /* switch name, tune arch */
119 { (const char *)0, "--with-cpu=", 1, 1 },
120 { (const char *)0, "-mcpu=", 1, 1 },
121 { (const char *)0, "-mtune=", 1, 0 },
124 /* Always emit branch hint bits. */
125 static GTY(()) bool rs6000_always_hint;
127 /* Schedule instructions for group formation. */
128 static GTY(()) bool rs6000_sched_groups;
130 /* Support adjust_priority scheduler hook
131 and -mprioritize-restricted-insns= option. */
132 const char *rs6000_sched_restricted_insns_priority_str;
133 int rs6000_sched_restricted_insns_priority;
135 /* Support for -msched-costly-dep option. */
136 const char *rs6000_sched_costly_dep_str;
137 enum rs6000_dependence_cost rs6000_sched_costly_dep;
139 /* Support for -minsert-sched-nops option. */
140 const char *rs6000_sched_insert_nops_str;
141 enum rs6000_nop_insertion rs6000_sched_insert_nops;
143 /* Size of long double */
144 const char *rs6000_long_double_size_string;
145 int rs6000_long_double_type_size;
147 /* Whether -mabi=altivec has appeared */
148 int rs6000_altivec_abi;
150 /* Whether VRSAVE instructions should be generated. */
151 int rs6000_altivec_vrsave;
153 /* String from -mvrsave= option. */
154 const char *rs6000_altivec_vrsave_string;
156 /* Nonzero if we want SPE ABI extensions. */
159 /* Whether isel instructions should be generated. */
162 /* Whether SPE simd instructions should be generated. */
165 /* Nonzero if floating point operations are done in the GPRs. */
166 int rs6000_float_gprs = 0;
168 /* String from -mfloat-gprs=. */
169 const char *rs6000_float_gprs_string;
171 /* String from -misel=. */
172 const char *rs6000_isel_string;
174 /* String from -mspe=. */
175 const char *rs6000_spe_string;
177 /* Set to nonzero once AIX common-mode calls have been defined. */
178 static GTY(()) int common_mode_defined;
180 /* Save information from a "cmpxx" operation until the branch or scc is
182 rtx rs6000_compare_op0, rs6000_compare_op1;
183 int rs6000_compare_fp_p;
185 /* Label number of label created for -mrelocatable, to call to so we can
186 get the address of the GOT section */
187 int rs6000_pic_labelno;
190 /* Which abi to adhere to */
191 const char *rs6000_abi_name;
193 /* Semantics of the small data area */
194 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
196 /* Which small data model to use */
197 const char *rs6000_sdata_name = (char *)0;
199 /* Counter for labels which are to be placed in .fixup. */
200 int fixuplabelno = 0;
203 /* Bit size of immediate TLS offsets and string from which it is decoded. */
204 int rs6000_tls_size = 32;
205 const char *rs6000_tls_size_string;
207 /* ABI enumeration available for subtarget to use. */
208 enum rs6000_abi rs6000_current_abi;
210 /* ABI string from -mabi= option. */
211 const char *rs6000_abi_string;
214 const char *rs6000_debug_name;
215 int rs6000_debug_stack; /* debug stack applications */
216 int rs6000_debug_arg; /* debug argument handling */
219 static GTY(()) tree opaque_V2SI_type_node;
220 static GTY(()) tree opaque_V2SF_type_node;
221 static GTY(()) tree opaque_p_V2SI_type_node;
223 /* AltiVec requires a few more basic types in addition to the vector
224 types already defined in tree.c. */
225 static GTY(()) tree bool_char_type_node; /* __bool char */
226 static GTY(()) tree bool_short_type_node; /* __bool short */
227 static GTY(()) tree bool_int_type_node; /* __bool int */
228 static GTY(()) tree pixel_type_node; /* __pixel */
229 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
230 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
231 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
232 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
234 int rs6000_warn_altivec_long = 1; /* On by default. */
235 const char *rs6000_warn_altivec_long_switch;
237 const char *rs6000_traceback_name;
239 traceback_default = 0,
245 /* Flag to say the TOC is initialized */
247 char toc_label_name[10];
249 /* Alias set for saves and restores from the rs6000 stack. */
250 static GTY(()) int rs6000_sr_alias_set;
252 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
253 The only place that looks at this is rs6000_set_default_type_attributes;
254 everywhere else should rely on the presence or absence of a longcall
255 attribute on the function declaration. Exception: init_cumulative_args
256 looks at it too, for libcalls. */
257 int rs6000_default_long_calls;
258 const char *rs6000_longcall_switch;
260 /* Control alignment for fields within structures. */
261 /* String from -malign-XXXXX. */
262 const char *rs6000_alignment_string;
263 int rs6000_alignment_flags;
265 struct builtin_description
267 /* mask is not const because we're going to alter it below. This
268 nonsense will go away when we rewrite the -march infrastructure
269 to give us more target flag bits. */
271 const enum insn_code icode;
272 const char *const name;
273 const enum rs6000_builtins code;
276 static bool rs6000_function_ok_for_sibcall (tree, tree);
277 static int num_insns_constant_wide (HOST_WIDE_INT);
278 static void validate_condition_mode (enum rtx_code, enum machine_mode);
279 static rtx rs6000_generate_compare (enum rtx_code);
280 static void rs6000_maybe_dead (rtx);
281 static void rs6000_emit_stack_tie (void);
282 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
283 static rtx spe_synthesize_frame_save (rtx);
284 static bool spe_func_has_64bit_regs_p (void);
285 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
287 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
288 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
289 static unsigned rs6000_hash_constant (rtx);
290 static unsigned toc_hash_function (const void *);
291 static int toc_hash_eq (const void *, const void *);
292 static int constant_pool_expr_1 (rtx, int *, int *);
293 static bool constant_pool_expr_p (rtx);
294 static bool toc_relative_expr_p (rtx);
295 static bool legitimate_small_data_p (enum machine_mode, rtx);
296 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
297 static bool legitimate_indexed_address_p (rtx, int);
298 static bool legitimate_indirect_address_p (rtx, int);
299 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
300 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
301 static struct machine_function * rs6000_init_machine_status (void);
302 static bool rs6000_assemble_integer (rtx, unsigned int, int);
303 #ifdef HAVE_GAS_HIDDEN
304 static void rs6000_assemble_visibility (tree, int);
306 static int rs6000_ra_ever_killed (void);
307 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
308 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
309 static const char *rs6000_mangle_fundamental_type (tree);
310 extern const struct attribute_spec rs6000_attribute_table[];
311 static void rs6000_set_default_type_attributes (tree);
312 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
313 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
314 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
316 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
317 static bool rs6000_return_in_memory (tree, tree);
318 static void rs6000_file_start (void);
320 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
321 static void rs6000_elf_asm_out_constructor (rtx, int);
322 static void rs6000_elf_asm_out_destructor (rtx, int);
323 static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
324 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
325 static void rs6000_elf_unique_section (tree, int);
326 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
327 unsigned HOST_WIDE_INT);
328 static void rs6000_elf_encode_section_info (tree, rtx, int)
330 static bool rs6000_elf_in_small_data_p (tree);
333 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
334 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
335 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
336 static void rs6000_xcoff_unique_section (tree, int);
337 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
338 unsigned HOST_WIDE_INT);
339 static const char * rs6000_xcoff_strip_name_encoding (const char *);
340 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
341 static void rs6000_xcoff_file_start (void);
342 static void rs6000_xcoff_file_end (void);
345 static bool rs6000_binds_local_p (tree);
347 static int rs6000_use_dfa_pipeline_interface (void);
348 static int rs6000_variable_issue (FILE *, int, rtx, int);
349 static bool rs6000_rtx_costs (rtx, int, int, int *);
350 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
351 static bool is_microcoded_insn (rtx);
352 static int is_dispatch_slot_restricted (rtx);
353 static bool is_cracked_insn (rtx);
354 static bool is_branch_slot_insn (rtx);
355 static int rs6000_adjust_priority (rtx, int);
356 static int rs6000_issue_rate (void);
357 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
358 static rtx get_next_active_insn (rtx, rtx);
359 static bool insn_terminates_group_p (rtx , enum group_termination);
360 static bool is_costly_group (rtx *, rtx);
361 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
362 static int redefine_groups (FILE *, int, rtx, rtx);
363 static int pad_groups (FILE *, int, rtx, rtx);
364 static void rs6000_sched_finish (FILE *, int);
365 static int rs6000_use_sched_lookahead (void);
367 static void rs6000_init_builtins (void);
368 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
369 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
370 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
371 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
372 static void altivec_init_builtins (void);
373 static void rs6000_common_init_builtins (void);
374 static void rs6000_init_libfuncs (void);
376 static void enable_mask_for_builtins (struct builtin_description *, int,
377 enum rs6000_builtins,
378 enum rs6000_builtins);
379 static void spe_init_builtins (void);
380 static rtx spe_expand_builtin (tree, rtx, bool *);
381 static rtx spe_expand_stv_builtin (enum insn_code, tree);
382 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
383 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
384 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
385 static rs6000_stack_t *rs6000_stack_info (void);
386 static void debug_stack_info (rs6000_stack_t *);
388 static rtx altivec_expand_builtin (tree, rtx, bool *);
389 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
390 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
391 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
392 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
393 static rtx altivec_expand_predicate_builtin (enum insn_code,
394 const char *, tree, rtx);
395 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
396 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
397 static void rs6000_parse_abi_options (void);
398 static void rs6000_parse_alignment_option (void);
399 static void rs6000_parse_tls_size_option (void);
400 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
401 static int first_altivec_reg_to_save (void);
402 static unsigned int compute_vrsave_mask (void);
403 static void is_altivec_return_reg (rtx, void *);
404 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
405 int easy_vector_constant (rtx, enum machine_mode);
406 static int easy_vector_same (rtx, enum machine_mode);
407 static int easy_vector_splat_const (int, enum machine_mode);
408 static bool is_ev64_opaque_type (tree);
409 static rtx rs6000_dwarf_register_span (rtx);
410 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
411 static rtx rs6000_tls_get_addr (void);
412 static rtx rs6000_got_sym (void);
413 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
414 static const char *rs6000_get_some_local_dynamic_name (void);
415 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
416 static rtx rs6000_complex_function_value (enum machine_mode);
417 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
418 enum machine_mode, tree);
419 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
420 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
421 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
422 enum machine_mode, tree,
425 static void macho_branch_islands (void);
426 static void add_compiler_branch_island (tree, tree, int);
427 static int no_previous_def (tree function_name);
428 static tree get_prev_label (tree function_name);
431 static tree rs6000_build_builtin_va_list (void);
433 /* Hash table stuff for keeping track of TOC entries. */
435 struct toc_hash_struct GTY(())
437 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
438 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
440 enum machine_mode key_mode;
444 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
446 /* Default register names. */
447 char rs6000_reg_names[][8] =
449 "0", "1", "2", "3", "4", "5", "6", "7",
450 "8", "9", "10", "11", "12", "13", "14", "15",
451 "16", "17", "18", "19", "20", "21", "22", "23",
452 "24", "25", "26", "27", "28", "29", "30", "31",
453 "0", "1", "2", "3", "4", "5", "6", "7",
454 "8", "9", "10", "11", "12", "13", "14", "15",
455 "16", "17", "18", "19", "20", "21", "22", "23",
456 "24", "25", "26", "27", "28", "29", "30", "31",
457 "mq", "lr", "ctr","ap",
458 "0", "1", "2", "3", "4", "5", "6", "7",
460 /* AltiVec registers. */
461 "0", "1", "2", "3", "4", "5", "6", "7",
462 "8", "9", "10", "11", "12", "13", "14", "15",
463 "16", "17", "18", "19", "20", "21", "22", "23",
464 "24", "25", "26", "27", "28", "29", "30", "31",
470 #ifdef TARGET_REGNAMES
471 static const char alt_reg_names[][8] =
473 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
474 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
475 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
476 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
477 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
478 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
479 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
480 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
481 "mq", "lr", "ctr", "ap",
482 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
484 /* AltiVec registers. */
485 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
486 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
487 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
488 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
495 #ifndef MASK_STRICT_ALIGN
496 #define MASK_STRICT_ALIGN 0
498 #ifndef TARGET_PROFILE_KERNEL
499 #define TARGET_PROFILE_KERNEL 0
502 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
503 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
505 /* Return 1 for a symbol ref for a thread-local storage symbol. */
506 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
507 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
509 /* Initialize the GCC target structure. */
510 #undef TARGET_ATTRIBUTE_TABLE
511 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
512 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
513 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
515 #undef TARGET_ASM_ALIGNED_DI_OP
516 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
518 /* Default unaligned ops are only provided for ELF. Find the ops needed
519 for non-ELF systems. */
520 #ifndef OBJECT_FORMAT_ELF
522 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
524 #undef TARGET_ASM_UNALIGNED_HI_OP
525 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
526 #undef TARGET_ASM_UNALIGNED_SI_OP
527 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
528 #undef TARGET_ASM_UNALIGNED_DI_OP
529 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
532 #undef TARGET_ASM_UNALIGNED_HI_OP
533 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
534 #undef TARGET_ASM_UNALIGNED_SI_OP
535 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
539 /* This hook deals with fixups for relocatable code and DI-mode objects
541 #undef TARGET_ASM_INTEGER
542 #define TARGET_ASM_INTEGER rs6000_assemble_integer
544 #ifdef HAVE_GAS_HIDDEN
545 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
546 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
549 #undef TARGET_HAVE_TLS
550 #define TARGET_HAVE_TLS HAVE_AS_TLS
552 #undef TARGET_CANNOT_FORCE_CONST_MEM
553 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
555 #undef TARGET_ASM_FUNCTION_PROLOGUE
556 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
557 #undef TARGET_ASM_FUNCTION_EPILOGUE
558 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
560 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
561 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
562 #undef TARGET_SCHED_VARIABLE_ISSUE
563 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
565 #undef TARGET_SCHED_ISSUE_RATE
566 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
567 #undef TARGET_SCHED_ADJUST_COST
568 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
569 #undef TARGET_SCHED_ADJUST_PRIORITY
570 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
571 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
572 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
573 #undef TARGET_SCHED_FINISH
574 #define TARGET_SCHED_FINISH rs6000_sched_finish
576 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
577 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
579 #undef TARGET_INIT_BUILTINS
580 #define TARGET_INIT_BUILTINS rs6000_init_builtins
582 #undef TARGET_EXPAND_BUILTIN
583 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
585 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
586 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
588 #undef TARGET_INIT_LIBFUNCS
589 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
592 #undef TARGET_BINDS_LOCAL_P
593 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
596 #undef TARGET_ASM_OUTPUT_MI_THUNK
597 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
599 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
600 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
602 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
603 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
605 #undef TARGET_RTX_COSTS
606 #define TARGET_RTX_COSTS rs6000_rtx_costs
607 #undef TARGET_ADDRESS_COST
608 #define TARGET_ADDRESS_COST hook_int_rtx_0
610 #undef TARGET_VECTOR_OPAQUE_P
611 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
613 #undef TARGET_DWARF_REGISTER_SPAN
614 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
616 /* On rs6000, function arguments are promoted, as are function return
618 #undef TARGET_PROMOTE_FUNCTION_ARGS
619 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
620 #undef TARGET_PROMOTE_FUNCTION_RETURN
621 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
623 /* Structure return values are passed as an extra parameter. */
624 #undef TARGET_STRUCT_VALUE_RTX
625 #define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
627 #undef TARGET_RETURN_IN_MEMORY
628 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
630 #undef TARGET_SETUP_INCOMING_VARARGS
631 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
633 /* Always strict argument naming on rs6000. */
634 #undef TARGET_STRICT_ARGUMENT_NAMING
635 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
636 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
637 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
638 #undef TARGET_SPLIT_COMPLEX_ARG
639 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
641 #undef TARGET_BUILD_BUILTIN_VA_LIST
642 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
644 struct gcc_target targetm = TARGET_INITIALIZER;
646 /* Override command line options. Mostly we process the processor
647 type and sometimes adjust other TARGET_ options. */
650 rs6000_override_options (const char *default_cpu)
653 struct rs6000_cpu_select *ptr;
656 /* Simplifications for entries below. */
659 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
660 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
663 /* This table occasionally claims that a processor does not support
664 a particular feature even though it does, but the feature is slower
665 than the alternative. Thus, it shouldn't be relied on as a
666 complete description of the processor's support.
668 Please keep this list in order, and don't forget to update the
669 documentation in invoke.texi when adding a new processor or
673 const char *const name; /* Canonical processor name. */
674 const enum processor_type processor; /* Processor type enum value. */
675 const int target_enable; /* Target flags to enable. */
676 } const processor_target_table[]
677 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
678 {"403", PROCESSOR_PPC403,
679 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
680 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
681 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
682 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
683 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
684 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
685 {"601", PROCESSOR_PPC601,
686 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
687 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
688 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
689 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
690 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
691 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
692 {"620", PROCESSOR_PPC620,
693 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
694 {"630", PROCESSOR_PPC630,
695 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
696 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
697 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
698 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
699 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
700 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
701 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
702 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
703 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
704 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
705 {"970", PROCESSOR_POWER4,
706 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
707 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
708 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
709 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
710 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
711 {"G5", PROCESSOR_POWER4,
712 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
713 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
714 {"power2", PROCESSOR_POWER,
715 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
716 {"power3", PROCESSOR_PPC630,
717 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
718 {"power4", PROCESSOR_POWER4,
719 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
720 {"power5", PROCESSOR_POWER5,
721 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
722 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
723 {"powerpc64", PROCESSOR_POWERPC64,
724 POWERPC_BASE_MASK | MASK_POWERPC64},
725 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
726 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
727 {"rios2", PROCESSOR_RIOS2,
728 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
729 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
730 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
731 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
734 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
736 /* Some OSs don't support saving the high part of 64-bit registers on
737 context switch. Other OSs don't support saving Altivec registers.
738 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
739 settings; if the user wants either, the user must explicitly specify
740 them and we won't interfere with the user's specification. */
743 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
744 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
745 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
748 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
749 #ifdef OS_MISSING_POWERPC64
750 if (OS_MISSING_POWERPC64)
751 set_masks &= ~MASK_POWERPC64;
753 #ifdef OS_MISSING_ALTIVEC
754 if (OS_MISSING_ALTIVEC)
755 set_masks &= ~MASK_ALTIVEC;
758 /* Don't override by the processor default if given explicitly. */
759 set_masks &= ~target_flags_explicit;
761 /* Identify the processor type. */
762 rs6000_select[0].string = default_cpu;
763 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
765 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
767 ptr = &rs6000_select[i];
768 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
770 for (j = 0; j < ptt_size; j++)
771 if (! strcmp (ptr->string, processor_target_table[j].name))
774 rs6000_cpu = processor_target_table[j].processor;
778 target_flags &= ~set_masks;
779 target_flags |= (processor_target_table[j].target_enable
786 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
793 /* If we are optimizing big endian systems for space, use the load/store
794 multiple and string instructions. */
795 if (BYTES_BIG_ENDIAN && optimize_size)
796 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
798 /* Don't allow -mmultiple or -mstring on little endian systems
799 unless the cpu is a 750, because the hardware doesn't support the
800 instructions used in little endian mode, and causes an alignment
801 trap. The 750 does not cause an alignment trap (except when the
802 target is unaligned). */
804 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
808 target_flags &= ~MASK_MULTIPLE;
809 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
810 warning ("-mmultiple is not supported on little endian systems");
815 target_flags &= ~MASK_STRING;
816 if ((target_flags_explicit & MASK_STRING) != 0)
817 warning ("-mstring is not supported on little endian systems");
821 /* Set debug flags */
822 if (rs6000_debug_name)
824 if (! strcmp (rs6000_debug_name, "all"))
825 rs6000_debug_stack = rs6000_debug_arg = 1;
826 else if (! strcmp (rs6000_debug_name, "stack"))
827 rs6000_debug_stack = 1;
828 else if (! strcmp (rs6000_debug_name, "arg"))
829 rs6000_debug_arg = 1;
831 error ("unknown -mdebug-%s switch", rs6000_debug_name);
834 if (rs6000_traceback_name)
836 if (! strncmp (rs6000_traceback_name, "full", 4))
837 rs6000_traceback = traceback_full;
838 else if (! strncmp (rs6000_traceback_name, "part", 4))
839 rs6000_traceback = traceback_part;
840 else if (! strncmp (rs6000_traceback_name, "no", 2))
841 rs6000_traceback = traceback_none;
843 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
844 rs6000_traceback_name);
847 /* Set size of long double */
848 rs6000_long_double_type_size = 64;
849 if (rs6000_long_double_size_string)
852 int size = strtol (rs6000_long_double_size_string, &tail, 10);
853 if (*tail != '\0' || (size != 64 && size != 128))
854 error ("Unknown switch -mlong-double-%s",
855 rs6000_long_double_size_string);
857 rs6000_long_double_type_size = size;
860 /* Set Altivec ABI as default for powerpc64 linux. */
861 if (TARGET_ELF && TARGET_64BIT)
863 rs6000_altivec_abi = 1;
864 rs6000_altivec_vrsave = 1;
867 /* Handle -mabi= options. */
868 rs6000_parse_abi_options ();
870 /* Handle -malign-XXXXX option. */
871 rs6000_parse_alignment_option ();
873 /* Handle generic -mFOO=YES/NO options. */
874 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
875 &rs6000_altivec_vrsave);
876 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
878 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
879 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
882 /* Handle -mtls-size option. */
883 rs6000_parse_tls_size_option ();
885 #ifdef SUBTARGET_OVERRIDE_OPTIONS
886 SUBTARGET_OVERRIDE_OPTIONS;
888 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
889 SUBSUBTARGET_OVERRIDE_OPTIONS;
895 error ("AltiVec and E500 instructions cannot coexist");
897 /* The e500 does not have string instructions, and we set
898 MASK_STRING above when optimizing for size. */
899 if ((target_flags & MASK_STRING) != 0)
900 target_flags = target_flags & ~MASK_STRING;
902 /* No SPE means 64-bit long doubles, even if an E500. */
903 if (rs6000_spe_string != 0
904 && !strcmp (rs6000_spe_string, "no"))
905 rs6000_long_double_type_size = 64;
907 else if (rs6000_select[1].string != NULL)
909 /* For the powerpc-eabispe configuration, we set all these by
910 default, so let's unset them if we manually set another
911 CPU that is not the E500. */
912 if (rs6000_abi_string == 0)
914 if (rs6000_spe_string == 0)
916 if (rs6000_float_gprs_string == 0)
917 rs6000_float_gprs = 0;
918 if (rs6000_isel_string == 0)
920 if (rs6000_long_double_size_string == 0)
921 rs6000_long_double_type_size = 64;
924 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
925 && rs6000_cpu != PROCESSOR_POWER5);
926 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
927 || rs6000_cpu == PROCESSOR_POWER5);
929 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
930 using TARGET_OPTIONS to handle a toggle switch, but we're out of
931 bits in target_flags so TARGET_SWITCHES cannot be used.
932 Assumption here is that rs6000_longcall_switch points into the
933 text of the complete option, rather than being a copy, so we can
934 scan back for the presence or absence of the no- modifier. */
935 if (rs6000_longcall_switch)
937 const char *base = rs6000_longcall_switch;
938 while (base[-1] != 'm') base--;
940 if (*rs6000_longcall_switch != '\0')
941 error ("invalid option `%s'", base);
942 rs6000_default_long_calls = (base[0] != 'n');
945 /* Handle -m(no-)warn-altivec-long similarly. */
946 if (rs6000_warn_altivec_long_switch)
948 const char *base = rs6000_warn_altivec_long_switch;
949 while (base[-1] != 'm') base--;
951 if (*rs6000_warn_altivec_long_switch != '\0')
952 error ("invalid option `%s'", base);
953 rs6000_warn_altivec_long = (base[0] != 'n');
956 /* Handle -mprioritize-restricted-insns option. */
957 rs6000_sched_restricted_insns_priority
958 = (rs6000_sched_groups ? 1 : 0);
959 if (rs6000_sched_restricted_insns_priority_str)
960 rs6000_sched_restricted_insns_priority =
961 atoi (rs6000_sched_restricted_insns_priority_str);
963 /* Handle -msched-costly-dep option. */
964 rs6000_sched_costly_dep
965 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
966 if (rs6000_sched_costly_dep_str)
968 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
969 rs6000_sched_costly_dep = no_dep_costly;
970 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
971 rs6000_sched_costly_dep = all_deps_costly;
972 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
973 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
974 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
975 rs6000_sched_costly_dep = store_to_load_dep_costly;
977 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
980 /* Handle -minsert-sched-nops option. */
981 rs6000_sched_insert_nops
982 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
983 if (rs6000_sched_insert_nops_str)
985 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
986 rs6000_sched_insert_nops = sched_finish_none;
987 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
988 rs6000_sched_insert_nops = sched_finish_pad_groups;
989 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
990 rs6000_sched_insert_nops = sched_finish_regroup_exact;
992 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
995 #ifdef TARGET_REGNAMES
996 /* If the user desires alternate register names, copy in the
997 alternate names now. */
999 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1002 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1003 If -maix-struct-return or -msvr4-struct-return was explicitly
1004 used, don't override with the ABI default. */
1005 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1007 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1008 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1010 target_flags |= MASK_AIX_STRUCT_RET;
1013 if (TARGET_LONG_DOUBLE_128
1014 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1015 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1017 /* Allocate an alias set for register saves & restores from stack. */
1018 rs6000_sr_alias_set = new_alias_set ();
1021 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1023 /* We can only guarantee the availability of DI pseudo-ops when
1024 assembling for 64-bit targets. */
1027 targetm.asm_out.aligned_op.di = NULL;
1028 targetm.asm_out.unaligned_op.di = NULL;
1031 /* Set maximum branch target alignment at two instructions, eight bytes. */
1032 align_jumps_max_skip = 8;
1033 align_loops_max_skip = 8;
1035 /* Arrange to save and restore machine status around nested functions. */
1036 init_machine_status = rs6000_init_machine_status;
1038 /* We should always be splitting complex arguments, but we can't break
1039 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1040 if (DEFAULT_ABI != ABI_AIX)
1041 targetm.calls.split_complex_arg = NULL;
1044 /* Handle generic options of the form -mfoo=yes/no.
1045 NAME is the option name.
1046 VALUE is the option value.
1047 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1048 whether the option value is 'yes' or 'no' respectively. */
1050 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1054 else if (!strcmp (value, "yes"))
1056 else if (!strcmp (value, "no"))
1059 error ("unknown -m%s= option specified: '%s'", name, value);
1062 /* Handle -mabi= options. */
1064 rs6000_parse_abi_options (void)
1066 if (rs6000_abi_string == 0)
1068 else if (! strcmp (rs6000_abi_string, "altivec"))
1070 rs6000_altivec_abi = 1;
1073 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1074 rs6000_altivec_abi = 0;
1075 else if (! strcmp (rs6000_abi_string, "spe"))
1078 rs6000_altivec_abi = 0;
1079 if (!TARGET_SPE_ABI)
1080 error ("not configured for ABI: '%s'", rs6000_abi_string);
1083 else if (! strcmp (rs6000_abi_string, "no-spe"))
1086 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1089 /* Handle -malign-XXXXXX options. */
1091 rs6000_parse_alignment_option (void)
1093 if (rs6000_alignment_string == 0)
1095 else if (! strcmp (rs6000_alignment_string, "power"))
1096 rs6000_alignment_flags = MASK_ALIGN_POWER;
1097 else if (! strcmp (rs6000_alignment_string, "natural"))
1098 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1100 error ("unknown -malign-XXXXX option specified: '%s'",
1101 rs6000_alignment_string);
1104 /* Validate and record the size specified with the -mtls-size option. */
1107 rs6000_parse_tls_size_option (void)
1109 if (rs6000_tls_size_string == 0)
1111 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1112 rs6000_tls_size = 16;
1113 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1114 rs6000_tls_size = 32;
1115 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1116 rs6000_tls_size = 64;
1118 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1122 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1126 /* Do anything needed at the start of the asm file. */
1129 rs6000_file_start (void)
1133 const char *start = buffer;
1134 struct rs6000_cpu_select *ptr;
1135 const char *default_cpu = TARGET_CPU_DEFAULT;
1136 FILE *file = asm_out_file;
1138 default_file_start ();
1140 #ifdef TARGET_BI_ARCH
1141 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1145 if (flag_verbose_asm)
1147 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1148 rs6000_select[0].string = default_cpu;
1150 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1152 ptr = &rs6000_select[i];
1153 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1155 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1160 #ifdef USING_ELFOS_H
1161 switch (rs6000_sdata)
1163 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1164 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1165 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1166 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1169 if (rs6000_sdata && g_switch_value)
1171 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1182 /* Return nonzero if this function is known to have a null epilogue. */
1185 direct_return (void)
1187 if (reload_completed)
1189 rs6000_stack_t *info = rs6000_stack_info ();
1191 if (info->first_gp_reg_save == 32
1192 && info->first_fp_reg_save == 64
1193 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1194 && ! info->lr_save_p
1195 && ! info->cr_save_p
1196 && info->vrsave_mask == 0
1204 /* Returns 1 always. */
1207 any_operand (rtx op ATTRIBUTE_UNUSED,
1208 enum machine_mode mode ATTRIBUTE_UNUSED)
1213 /* Returns 1 if op is the count register. */
1215 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1217 if (GET_CODE (op) != REG)
1220 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1223 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1229 /* Returns 1 if op is an altivec register. */
1231 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1234 return (register_operand (op, mode)
1235 && (GET_CODE (op) != REG
1236 || REGNO (op) > FIRST_PSEUDO_REGISTER
1237 || ALTIVEC_REGNO_P (REGNO (op))));
1241 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1243 if (GET_CODE (op) != REG)
1246 if (XER_REGNO_P (REGNO (op)))
1252 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1253 by such constants completes more quickly. */
1256 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1258 return ( GET_CODE (op) == CONST_INT
1259 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1262 /* Return 1 if OP is a constant that can fit in a D field. */
1265 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1267 return (GET_CODE (op) == CONST_INT
1268 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1271 /* Similar for an unsigned D field. */
1274 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1276 return (GET_CODE (op) == CONST_INT
1277 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1280 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1283 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1285 return (GET_CODE (op) == CONST_INT
1286 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1289 /* Returns 1 if OP is a CONST_INT that is a positive value
1290 and an exact power of 2. */
1293 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1295 return (GET_CODE (op) == CONST_INT
1297 && exact_log2 (INTVAL (op)) >= 0);
1300 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1304 gpc_reg_operand (rtx op, enum machine_mode mode)
1306 return (register_operand (op, mode)
1307 && (GET_CODE (op) != REG
1308 || (REGNO (op) >= ARG_POINTER_REGNUM
1309 && !XER_REGNO_P (REGNO (op)))
1310 || REGNO (op) < MQ_REGNO));
1313 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1317 cc_reg_operand (rtx op, enum machine_mode mode)
1319 return (register_operand (op, mode)
1320 && (GET_CODE (op) != REG
1321 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1322 || CR_REGNO_P (REGNO (op))));
1325 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1326 CR field that isn't CR0. */
1329 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1331 return (register_operand (op, mode)
1332 && (GET_CODE (op) != REG
1333 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1334 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1337 /* Returns 1 if OP is either a constant integer valid for a D-field or
1338 a non-special register. If a register, it must be in the proper
1339 mode unless MODE is VOIDmode. */
1342 reg_or_short_operand (rtx op, enum machine_mode mode)
1344 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1347 /* Similar, except check if the negation of the constant would be
1348 valid for a D-field. Don't allow a constant zero, since all the
1349 patterns that call this predicate use "addic r1,r2,-constant" on
1350 a constant value to set a carry when r2 is greater or equal to
1351 "constant". That doesn't work for zero. */
1354 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1356 if (GET_CODE (op) == CONST_INT)
1357 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;
1359 return gpc_reg_operand (op, mode);
1362 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1363 a non-special register. If a register, it must be in the proper
1364 mode unless MODE is VOIDmode. */
1367 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1369 if (gpc_reg_operand (op, mode))
1371 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1378 /* Return 1 if the operand is either a register or an integer whose
1379 high-order 16 bits are zero. */
1382 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1384 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1387 /* Return 1 is the operand is either a non-special register or ANY
1388 constant integer. */
1391 reg_or_cint_operand (rtx op, enum machine_mode mode)
1393 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1396 /* Return 1 is the operand is either a non-special register or ANY
1397 32-bit signed constant integer. */
1400 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1402 return (gpc_reg_operand (op, mode)
1403 || (GET_CODE (op) == CONST_INT
1404 #if HOST_BITS_PER_WIDE_INT != 32
1405 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1406 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1411 /* Return 1 is the operand is either a non-special register or a 32-bit
1412 signed constant integer valid for 64-bit addition. */
1415 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1417 return (gpc_reg_operand (op, mode)
1418 || (GET_CODE (op) == CONST_INT
1419 #if HOST_BITS_PER_WIDE_INT == 32
1420 && INTVAL (op) < 0x7fff8000
1422 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1428 /* Return 1 is the operand is either a non-special register or a 32-bit
1429 signed constant integer valid for 64-bit subtraction. */
1432 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1434 return (gpc_reg_operand (op, mode)
1435 || (GET_CODE (op) == CONST_INT
1436 #if HOST_BITS_PER_WIDE_INT == 32
1437 && (- INTVAL (op)) < 0x7fff8000
1439 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1445 /* Return 1 is the operand is either a non-special register or ANY
1446 32-bit unsigned constant integer. */
1449 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1451 if (GET_CODE (op) == CONST_INT)
1453 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1455 if (GET_MODE_BITSIZE (mode) <= 32)
1458 if (INTVAL (op) < 0)
1462 return ((INTVAL (op) & GET_MODE_MASK (mode)
1463 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1465 else if (GET_CODE (op) == CONST_DOUBLE)
1467 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1471 return CONST_DOUBLE_HIGH (op) == 0;
1474 return gpc_reg_operand (op, mode);
1477 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1480 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1482 return (GET_CODE (op) == SYMBOL_REF
1483 || GET_CODE (op) == CONST
1484 || GET_CODE (op) == LABEL_REF);
1487 /* Return 1 if the operand is a simple references that can be loaded via
1488 the GOT (labels involving addition aren't allowed). */
1491 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1493 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1496 /* Return the number of instructions it takes to form a constant in an
1497 integer register. */
1500 num_insns_constant_wide (HOST_WIDE_INT value)
1502 /* signed constant loadable with {cal|addi} */
1503 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1506 /* constant loadable with {cau|addis} */
1507 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1510 #if HOST_BITS_PER_WIDE_INT == 64
1511 else if (TARGET_POWERPC64)
1513 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1514 HOST_WIDE_INT high = value >> 31;
1516 if (high == 0 || high == -1)
1522 return num_insns_constant_wide (high) + 1;
1524 return (num_insns_constant_wide (high)
1525 + num_insns_constant_wide (low) + 1);
1534 num_insns_constant (rtx op, enum machine_mode mode)
1536 if (GET_CODE (op) == CONST_INT)
1538 #if HOST_BITS_PER_WIDE_INT == 64
1539 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1540 && mask64_operand (op, mode))
1544 return num_insns_constant_wide (INTVAL (op));
1547 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1552 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1553 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1554 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1557 else if (GET_CODE (op) == CONST_DOUBLE)
1563 int endian = (WORDS_BIG_ENDIAN == 0);
1565 if (mode == VOIDmode || mode == DImode)
1567 high = CONST_DOUBLE_HIGH (op);
1568 low = CONST_DOUBLE_LOW (op);
1572 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1573 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1575 low = l[1 - endian];
1579 return (num_insns_constant_wide (low)
1580 + num_insns_constant_wide (high));
1584 if (high == 0 && low >= 0)
1585 return num_insns_constant_wide (low);
1587 else if (high == -1 && low < 0)
1588 return num_insns_constant_wide (low);
1590 else if (mask64_operand (op, mode))
1594 return num_insns_constant_wide (high) + 1;
1597 return (num_insns_constant_wide (high)
1598 + num_insns_constant_wide (low) + 1);
1606 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1607 register with one instruction per word. We only do this if we can
1608 safely read CONST_DOUBLE_{LOW,HIGH}. */
1611 easy_fp_constant (rtx op, enum machine_mode mode)
1613 if (GET_CODE (op) != CONST_DOUBLE
1614 || GET_MODE (op) != mode
1615 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1618 /* Consider all constants with -msoft-float to be easy. */
1619 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1623 /* If we are using V.4 style PIC, consider all constants to be hard. */
1624 if (flag_pic && DEFAULT_ABI == ABI_V4)
1627 #ifdef TARGET_RELOCATABLE
1628 /* Similarly if we are using -mrelocatable, consider all constants
1630 if (TARGET_RELOCATABLE)
1639 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1640 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1642 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1643 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1644 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1645 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1648 else if (mode == DFmode)
1653 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1654 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1656 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1657 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1660 else if (mode == SFmode)
1665 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1666 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1668 return num_insns_constant_wide (l) == 1;
1671 else if (mode == DImode)
1672 return ((TARGET_POWERPC64
1673 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1674 || (num_insns_constant (op, DImode) <= 2));
1676 else if (mode == SImode)
1682 /* Returns the constant for the splat instrunction, if exists. */
1685 easy_vector_splat_const (int cst, enum machine_mode mode)
1690 if (EASY_VECTOR_15 (cst)
1691 || EASY_VECTOR_15_ADD_SELF (cst))
1693 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
1697 if (EASY_VECTOR_15 (cst)
1698 || EASY_VECTOR_15_ADD_SELF (cst))
1700 if ((cst & 0xff) != ((cst >> 8) & 0xff))
1704 if (EASY_VECTOR_15 (cst)
1705 || EASY_VECTOR_15_ADD_SELF (cst))
1714 /* Return nonzero if all elements of a vector have the same value. */
1717 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1721 units = CONST_VECTOR_NUNITS (op);
1723 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1724 for (i = 1; i < units; ++i)
1725 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1727 if (i == units && easy_vector_splat_const (cst, mode))
1732 /* Return 1 if the operand is a CONST_INT and can be put into a
1733 register without using memory. */
1736 easy_vector_constant (rtx op, enum machine_mode mode)
1740 if (GET_CODE (op) != CONST_VECTOR
1745 if (zero_constant (op, mode)
1746 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1747 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1750 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1753 if (TARGET_SPE && mode == V1DImode)
1756 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1757 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1759 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1761 evmergelo r0, r0, r0
1764 I don't know how efficient it would be to allow bigger constants,
1765 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1766 instructions is better than a 64-bit memory load, but I don't
1767 have the e500 timing specs. */
1768 if (TARGET_SPE && mode == V2SImode
1769 && cst >= -0x7fff && cst <= 0x7fff
1770 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1774 && easy_vector_same (op, mode))
1776 cst = easy_vector_splat_const (cst, mode);
1777 if (EASY_VECTOR_15_ADD_SELF (cst)
1778 || EASY_VECTOR_15 (cst))
1784 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1787 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1791 && GET_CODE (op) == CONST_VECTOR
1792 && easy_vector_same (op, mode))
1794 cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
1795 if (EASY_VECTOR_15_ADD_SELF (cst))
1801 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
1804 gen_easy_vector_constant_add_self (rtx op)
1808 units = GET_MODE_NUNITS (GET_MODE (op));
1809 v = rtvec_alloc (units);
1811 for (i = 0; i < units; i++)
1813 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
1814 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
1818 output_vec_const_move (rtx *operands)
1821 enum machine_mode mode;
1827 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1828 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1829 mode = GET_MODE (dest);
1833 if (zero_constant (vec, mode))
1834 return "vxor %0,%0,%0";
1835 else if (easy_vector_constant (vec, mode))
1837 operands[1] = GEN_INT (cst);
1841 if (EASY_VECTOR_15 (cst))
1843 operands[1] = GEN_INT (cst);
1844 return "vspltisw %0,%1";
1846 else if (EASY_VECTOR_15_ADD_SELF (cst))
1850 if (EASY_VECTOR_15 (cst))
1852 operands[1] = GEN_INT (cst);
1853 return "vspltish %0,%1";
1855 else if (EASY_VECTOR_15_ADD_SELF (cst))
1859 if (EASY_VECTOR_15 (cst))
1861 operands[1] = GEN_INT (cst);
1862 return "vspltisb %0,%1";
1864 else if (EASY_VECTOR_15_ADD_SELF (cst))
1876 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1877 pattern of V1DI, V4HI, and V2SF.
1879 FIXME: We should probably return # and add post reload
1880 splitters for these, but this way is so easy ;-).
1882 operands[1] = GEN_INT (cst);
1883 operands[2] = GEN_INT (cst2);
1885 return "li %0,%1\n\tevmergelo %0,%0,%0";
1887 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1893 /* Return 1 if the operand is the constant 0. This works for scalars
1894 as well as vectors. */
1896 zero_constant (rtx op, enum machine_mode mode)
1898 return op == CONST0_RTX (mode);
1901 /* Return 1 if the operand is 0.0. */
1903 zero_fp_constant (rtx op, enum machine_mode mode)
1905 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1908 /* Return 1 if the operand is in volatile memory. Note that during
1909 the RTL generation phase, memory_operand does not return TRUE for
1910 volatile memory references. So this function allows us to
1911 recognize volatile references where its safe. */
1914 volatile_mem_operand (rtx op, enum machine_mode mode)
1916 if (GET_CODE (op) != MEM)
1919 if (!MEM_VOLATILE_P (op))
1922 if (mode != GET_MODE (op))
1925 if (reload_completed)
1926 return memory_operand (op, mode);
1928 if (reload_in_progress)
1929 return strict_memory_address_p (mode, XEXP (op, 0));
1931 return memory_address_p (mode, XEXP (op, 0));
1934 /* Return 1 if the operand is an offsettable memory operand. */
1937 offsettable_mem_operand (rtx op, enum machine_mode mode)
1939 return ((GET_CODE (op) == MEM)
1940 && offsettable_address_p (reload_completed || reload_in_progress,
1941 mode, XEXP (op, 0)));
1944 /* Return 1 if the operand is either an easy FP constant (see above) or
1948 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1950 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1953 /* Return 1 if the operand is either a non-special register or an item
1954 that can be used as the operand of a `mode' add insn. */
1957 add_operand (rtx op, enum machine_mode mode)
1959 if (GET_CODE (op) == CONST_INT)
1960 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1961 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1963 return gpc_reg_operand (op, mode);
1966 /* Return 1 if OP is a constant but not a valid add_operand. */
1969 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1971 return (GET_CODE (op) == CONST_INT
1972 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1973 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1976 /* Return 1 if the operand is a non-special register or a constant that
1977 can be used as the operand of an OR or XOR insn on the RS/6000. */
1980 logical_operand (rtx op, enum machine_mode mode)
1982 HOST_WIDE_INT opl, oph;
1984 if (gpc_reg_operand (op, mode))
1987 if (GET_CODE (op) == CONST_INT)
1989 opl = INTVAL (op) & GET_MODE_MASK (mode);
1991 #if HOST_BITS_PER_WIDE_INT <= 32
1992 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1996 else if (GET_CODE (op) == CONST_DOUBLE)
1998 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2001 opl = CONST_DOUBLE_LOW (op);
2002 oph = CONST_DOUBLE_HIGH (op);
2009 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
2010 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
2013 /* Return 1 if C is a constant that is not a logical operand (as
2014 above), but could be split into one. */
2017 non_logical_cint_operand (rtx op, enum machine_mode mode)
2019 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2020 && ! logical_operand (op, mode)
2021 && reg_or_logical_cint_operand (op, mode));
2024 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2025 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2026 Reject all ones and all zeros, since these should have been optimized
2027 away and confuse the making of MB and ME. */
2030 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2032 HOST_WIDE_INT c, lsb;
2034 if (GET_CODE (op) != CONST_INT)
2039 /* Fail in 64-bit mode if the mask wraps around because the upper
2040 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2041 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2044 /* We don't change the number of transitions by inverting,
2045 so make sure we start with the LS bit zero. */
2049 /* Reject all zeros or all ones. */
2053 /* Find the first transition. */
2056 /* Invert to look for a second transition. */
2059 /* Erase first transition. */
2062 /* Find the second transition (if any). */
2065 /* Match if all the bits above are 1's (or c is zero). */
2069 /* Return 1 for the PowerPC64 rlwinm corner case. */
2072 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2074 HOST_WIDE_INT c, lsb;
2076 if (GET_CODE (op) != CONST_INT)
2081 if ((c & 0x80000001) != 0x80000001)
2095 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2096 It is if there are no more than one 1->0 or 0->1 transitions.
2097 Reject all zeros, since zero should have been optimized away and
2098 confuses the making of MB and ME. */
2101 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2103 if (GET_CODE (op) == CONST_INT)
2105 HOST_WIDE_INT c, lsb;
2109 /* Reject all zeros. */
2113 /* We don't change the number of transitions by inverting,
2114 so make sure we start with the LS bit zero. */
2118 /* Find the transition, and check that all bits above are 1's. */
2121 /* Match if all the bits above are 1's (or c is zero). */
2127 /* Like mask64_operand, but allow up to three transitions. This
2128 predicate is used by insn patterns that generate two rldicl or
2129 rldicr machine insns. */
2132 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2134 if (GET_CODE (op) == CONST_INT)
2136 HOST_WIDE_INT c, lsb;
2140 /* Disallow all zeros. */
2144 /* We don't change the number of transitions by inverting,
2145 so make sure we start with the LS bit zero. */
2149 /* Find the first transition. */
2152 /* Invert to look for a second transition. */
2155 /* Erase first transition. */
2158 /* Find the second transition. */
2161 /* Invert to look for a third transition. */
2164 /* Erase second transition. */
2167 /* Find the third transition (if any). */
2170 /* Match if all the bits above are 1's (or c is zero). */
2176 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2177 implement ANDing by the mask IN. */
2179 build_mask64_2_operands (rtx in, rtx *out)
2181 #if HOST_BITS_PER_WIDE_INT >= 64
2182 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2185 if (GET_CODE (in) != CONST_INT)
2191 /* Assume c initially something like 0x00fff000000fffff. The idea
2192 is to rotate the word so that the middle ^^^^^^ group of zeros
2193 is at the MS end and can be cleared with an rldicl mask. We then
2194 rotate back and clear off the MS ^^ group of zeros with a
2196 c = ~c; /* c == 0xff000ffffff00000 */
2197 lsb = c & -c; /* lsb == 0x0000000000100000 */
2198 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2199 c = ~c; /* c == 0x00fff000000fffff */
2200 c &= -lsb; /* c == 0x00fff00000000000 */
2201 lsb = c & -c; /* lsb == 0x0000100000000000 */
2202 c = ~c; /* c == 0xff000fffffffffff */
2203 c &= -lsb; /* c == 0xff00000000000000 */
2205 while ((lsb >>= 1) != 0)
2206 shift++; /* shift == 44 on exit from loop */
2207 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2208 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2209 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2213 /* Assume c initially something like 0xff000f0000000000. The idea
2214 is to rotate the word so that the ^^^ middle group of zeros
2215 is at the LS end and can be cleared with an rldicr mask. We then
2216 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2218 lsb = c & -c; /* lsb == 0x0000010000000000 */
2219 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2220 c = ~c; /* c == 0x00fff0ffffffffff */
2221 c &= -lsb; /* c == 0x00fff00000000000 */
2222 lsb = c & -c; /* lsb == 0x0000100000000000 */
2223 c = ~c; /* c == 0xff000fffffffffff */
2224 c &= -lsb; /* c == 0xff00000000000000 */
2226 while ((lsb >>= 1) != 0)
2227 shift++; /* shift == 44 on exit from loop */
2228 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2229 m1 >>= shift; /* m1 == 0x0000000000000fff */
2230 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2233 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2234 masks will be all 1's. We are guaranteed more than one transition. */
2235 out[0] = GEN_INT (64 - shift);
2236 out[1] = GEN_INT (m1);
2237 out[2] = GEN_INT (shift);
2238 out[3] = GEN_INT (m2);
2246 /* Return 1 if the operand is either a non-special register or a constant
2247 that can be used as the operand of a PowerPC64 logical AND insn. */
2250 and64_operand (rtx op, enum machine_mode mode)
2252 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2253 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2255 return (logical_operand (op, mode) || mask64_operand (op, mode));
2258 /* Like the above, but also match constants that can be implemented
2259 with two rldicl or rldicr insns. */
2262 and64_2_operand (rtx op, enum machine_mode mode)
2264 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2265 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2267 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2270 /* Return 1 if the operand is either a non-special register or a
2271 constant that can be used as the operand of an RS/6000 logical AND insn. */
2274 and_operand (rtx op, enum machine_mode mode)
2276 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2277 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2279 return (logical_operand (op, mode) || mask_operand (op, mode));
2282 /* Return 1 if the operand is a general register or memory operand. */
2285 reg_or_mem_operand (rtx op, enum machine_mode mode)
2287 return (gpc_reg_operand (op, mode)
2288 || memory_operand (op, mode)
2289 || macho_lo_sum_memory_operand (op, mode)
2290 || volatile_mem_operand (op, mode));
2293 /* Return 1 if the operand is a general register or memory operand without
2294 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2298 lwa_operand (rtx op, enum machine_mode mode)
2302 if (reload_completed && GET_CODE (inner) == SUBREG)
2303 inner = SUBREG_REG (inner);
2305 return gpc_reg_operand (inner, mode)
2306 || (memory_operand (inner, mode)
2307 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2308 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2309 && (GET_CODE (XEXP (inner, 0)) != PLUS
2310 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2311 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2314 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2317 symbol_ref_operand (rtx op, enum machine_mode mode)
2319 if (mode != VOIDmode && GET_MODE (op) != mode)
2322 return (GET_CODE (op) == SYMBOL_REF
2323 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2326 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2327 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2330 call_operand (rtx op, enum machine_mode mode)
2332 if (mode != VOIDmode && GET_MODE (op) != mode)
2335 return (GET_CODE (op) == SYMBOL_REF
2336 || (GET_CODE (op) == REG
2337 && (REGNO (op) == LINK_REGISTER_REGNUM
2338 || REGNO (op) == COUNT_REGISTER_REGNUM
2339 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2342 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2346 current_file_function_operand (rtx op,
2347 enum machine_mode mode ATTRIBUTE_UNUSED)
2349 return (GET_CODE (op) == SYMBOL_REF
2350 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2351 && (SYMBOL_REF_LOCAL_P (op)
2352 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2355 /* Return 1 if this operand is a valid input for a move insn. */
2358 input_operand (rtx op, enum machine_mode mode)
2360 /* Memory is always valid. */
2361 if (memory_operand (op, mode))
2364 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2365 if (GET_CODE (op) == CONSTANT_P_RTX)
2368 /* For floating-point, easy constants are valid. */
2369 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2371 && easy_fp_constant (op, mode))
2374 /* Allow any integer constant. */
2375 if (GET_MODE_CLASS (mode) == MODE_INT
2376 && (GET_CODE (op) == CONST_INT
2377 || GET_CODE (op) == CONST_DOUBLE))
2380 /* Allow easy vector constants. */
2381 if (GET_CODE (op) == CONST_VECTOR
2382 && easy_vector_constant (op, mode))
2385 /* For floating-point or multi-word mode, the only remaining valid type
2387 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2388 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2389 return register_operand (op, mode);
2391 /* The only cases left are integral modes one word or smaller (we
2392 do not get called for MODE_CC values). These can be in any
2394 if (register_operand (op, mode))
2397 /* A SYMBOL_REF referring to the TOC is valid. */
2398 if (legitimate_constant_pool_address_p (op))
2401 /* A constant pool expression (relative to the TOC) is valid */
2402 if (toc_relative_expr_p (op))
2405 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2407 if (DEFAULT_ABI == ABI_V4
2408 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2409 && small_data_operand (op, Pmode))
2416 /* Darwin, AIX increases natural record alignment to doubleword if the first
2417 field is an FP double while the FP fields remain word aligned. */
2420 rs6000_special_round_type_align (tree type, int computed, int specified)
2422 tree field = TYPE_FIELDS (type);
2424 /* Skip all the static variables only if ABI is greater than
2426 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2427 field = TREE_CHAIN (field);
2429 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2430 return MAX (computed, specified);
2432 return MAX (MAX (computed, specified), 64);
2435 /* Return 1 for an operand in small memory on V.4/eabi. */
2438 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2439 enum machine_mode mode ATTRIBUTE_UNUSED)
2444 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2447 if (DEFAULT_ABI != ABI_V4)
2450 if (GET_CODE (op) == SYMBOL_REF)
2453 else if (GET_CODE (op) != CONST
2454 || GET_CODE (XEXP (op, 0)) != PLUS
2455 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2456 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2461 rtx sum = XEXP (op, 0);
2462 HOST_WIDE_INT summand;
2464 /* We have to be careful here, because it is the referenced address
2465 that must be 32k from _SDA_BASE_, not just the symbol. */
2466 summand = INTVAL (XEXP (sum, 1));
2467 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2470 sym_ref = XEXP (sum, 0);
2473 return SYMBOL_REF_SMALL_P (sym_ref);
2479 /* Return true, if operand is a memory operand and has a
2480 displacement divisible by 4. */
2483 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2488 if (!memory_operand (op, mode))
2491 addr = XEXP (op, 0);
2492 if (GET_CODE (addr) == PLUS
2493 && GET_CODE (XEXP (addr, 0)) == REG
2494 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2495 off = INTVAL (XEXP (addr, 1));
2497 return (off % 4) == 0;
2500 /* Return true if either operand is a general purpose register. */
2503 gpr_or_gpr_p (rtx op0, rtx op1)
2505 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2506 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2510 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2513 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2515 switch (GET_CODE(op))
2518 if (RS6000_SYMBOL_REF_TLS_P (op))
2520 else if (CONSTANT_POOL_ADDRESS_P (op))
2522 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2530 else if (! strcmp (XSTR (op, 0), toc_label_name))
2539 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2540 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2542 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2551 constant_pool_expr_p (rtx op)
2555 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2559 toc_relative_expr_p (rtx op)
2563 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2566 /* SPE offset addressing is limited to 5-bits worth of double words. */
2567 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2570 legitimate_constant_pool_address_p (rtx x)
2573 && GET_CODE (x) == PLUS
2574 && GET_CODE (XEXP (x, 0)) == REG
2575 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2576 && constant_pool_expr_p (XEXP (x, 1)));
2580 legitimate_small_data_p (enum machine_mode mode, rtx x)
2582 return (DEFAULT_ABI == ABI_V4
2583 && !flag_pic && !TARGET_TOC
2584 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2585 && small_data_operand (x, mode));
2589 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2591 unsigned HOST_WIDE_INT offset, extra;
2593 if (GET_CODE (x) != PLUS)
2595 if (GET_CODE (XEXP (x, 0)) != REG)
2597 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2599 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2602 offset = INTVAL (XEXP (x, 1));
2610 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2611 which leaves the only valid constant offset of zero, which by
2612 canonicalization rules is also invalid. */
2619 /* SPE vector modes. */
2620 return SPE_CONST_OFFSET_OK (offset);
2624 if (mode == DFmode || !TARGET_POWERPC64)
2626 else if (offset & 3)
2632 if (mode == TFmode || !TARGET_POWERPC64)
2634 else if (offset & 3)
2645 return (offset < 0x10000) && (offset + extra < 0x10000);
2649 legitimate_indexed_address_p (rtx x, int strict)
2653 if (GET_CODE (x) != PLUS)
2658 if (!REG_P (op0) || !REG_P (op1))
2661 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2662 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2663 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2664 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2668 legitimate_indirect_address_p (rtx x, int strict)
2670 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2674 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2676 if (!TARGET_MACHO || !flag_pic
2677 || mode != SImode || GET_CODE(x) != MEM)
2681 if (GET_CODE (x) != LO_SUM)
2683 if (GET_CODE (XEXP (x, 0)) != REG)
2685 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2689 return CONSTANT_P (x);
2693 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2695 if (GET_CODE (x) != LO_SUM)
2697 if (GET_CODE (XEXP (x, 0)) != REG)
2699 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2703 if (TARGET_ELF || TARGET_MACHO)
2705 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2709 if (GET_MODE_NUNITS (mode) != 1)
2711 if (GET_MODE_BITSIZE (mode) > 32
2712 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2715 return CONSTANT_P (x);
2722 /* Try machine-dependent ways of modifying an illegitimate address
2723 to be legitimate. If we find one, return the new, valid address.
2724 This is used from only one place: `memory_address' in explow.c.
2726 OLDX is the address as it was before break_out_memory_refs was
2727 called. In some cases it is useful to look at this to decide what
2730 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2732 It is always safe for this function to do nothing. It exists to
2733 recognize opportunities to optimize the output.
2735 On RS/6000, first check for the sum of a register with a constant
2736 integer that is out of range. If so, generate code to add the
2737 constant with the low-order 16 bits masked to the register and force
2738 this result into another register (this can be done with `cau').
2739 Then generate an address of REG+(CONST&0xffff), allowing for the
2740 possibility of bit 16 being a one.
2742 Then check for the sum of a register and something not constant, try to
2743 load the other things into a register and return the sum. */
2746 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2747 enum machine_mode mode)
2749 if (GET_CODE (x) == SYMBOL_REF)
2751 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2753 return rs6000_legitimize_tls_address (x, model);
2756 if (GET_CODE (x) == PLUS
2757 && GET_CODE (XEXP (x, 0)) == REG
2758 && GET_CODE (XEXP (x, 1)) == CONST_INT
2759 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2761 HOST_WIDE_INT high_int, low_int;
2763 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2764 high_int = INTVAL (XEXP (x, 1)) - low_int;
2765 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2766 GEN_INT (high_int)), 0);
2767 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2769 else if (GET_CODE (x) == PLUS
2770 && GET_CODE (XEXP (x, 0)) == REG
2771 && GET_CODE (XEXP (x, 1)) != CONST_INT
2772 && GET_MODE_NUNITS (mode) == 1
2773 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2775 || (mode != DFmode && mode != TFmode))
2776 && (TARGET_POWERPC64 || mode != DImode)
2779 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2780 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2782 else if (ALTIVEC_VECTOR_MODE (mode))
2786 /* Make sure both operands are registers. */
2787 if (GET_CODE (x) == PLUS)
2788 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2789 force_reg (Pmode, XEXP (x, 1)));
2791 reg = force_reg (Pmode, x);
2794 else if (SPE_VECTOR_MODE (mode))
2796 /* We accept [reg + reg] and [reg + OFFSET]. */
2798 if (GET_CODE (x) == PLUS)
2800 rtx op1 = XEXP (x, 0);
2801 rtx op2 = XEXP (x, 1);
2803 op1 = force_reg (Pmode, op1);
2805 if (GET_CODE (op2) != REG
2806 && (GET_CODE (op2) != CONST_INT
2807 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2808 op2 = force_reg (Pmode, op2);
2810 return gen_rtx_PLUS (Pmode, op1, op2);
2813 return force_reg (Pmode, x);
2819 && GET_CODE (x) != CONST_INT
2820 && GET_CODE (x) != CONST_DOUBLE
2822 && GET_MODE_NUNITS (mode) == 1
2823 && (GET_MODE_BITSIZE (mode) <= 32
2824 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2826 rtx reg = gen_reg_rtx (Pmode);
2827 emit_insn (gen_elf_high (reg, x));
2828 return gen_rtx_LO_SUM (Pmode, reg, x);
2830 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2833 && ! MACHO_DYNAMIC_NO_PIC_P
2835 && GET_CODE (x) != CONST_INT
2836 && GET_CODE (x) != CONST_DOUBLE
2838 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2842 rtx reg = gen_reg_rtx (Pmode);
2843 emit_insn (gen_macho_high (reg, x));
2844 return gen_rtx_LO_SUM (Pmode, reg, x);
2847 && constant_pool_expr_p (x)
2848 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2850 return create_TOC_reference (x);
2856 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2857 We need to emit DTP-relative relocations. */
2860 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2865 fputs ("\t.long\t", file);
2868 fputs (DOUBLE_INT_ASM_OP, file);
2873 output_addr_const (file, x);
2874 fputs ("@dtprel+0x8000", file);
2877 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2879 static GTY(()) rtx rs6000_tls_symbol;
2881 rs6000_tls_get_addr (void)
2883 if (!rs6000_tls_symbol)
2884 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2886 return rs6000_tls_symbol;
2889 /* Construct the SYMBOL_REF for TLS GOT references. */
2891 static GTY(()) rtx rs6000_got_symbol;
2893 rs6000_got_sym (void)
2895 if (!rs6000_got_symbol)
2897 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2898 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2899 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2902 return rs6000_got_symbol;
2905 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2906 this (thread-local) address. */
2909 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2913 dest = gen_reg_rtx (Pmode);
2914 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2920 tlsreg = gen_rtx_REG (Pmode, 13);
2921 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2925 tlsreg = gen_rtx_REG (Pmode, 2);
2926 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2930 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2934 tmp = gen_reg_rtx (Pmode);
2937 tlsreg = gen_rtx_REG (Pmode, 13);
2938 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2942 tlsreg = gen_rtx_REG (Pmode, 2);
2943 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2947 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2949 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2954 rtx r3, got, tga, tmp1, tmp2, eqv;
2957 got = gen_rtx_REG (Pmode, 2);
2961 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2964 rtx gsym = rs6000_got_sym ();
2965 got = gen_reg_rtx (Pmode);
2967 rs6000_emit_move (got, gsym, Pmode);
2970 rtx tempLR, tmp3, mem;
2973 tempLR = gen_reg_rtx (Pmode);
2974 tmp1 = gen_reg_rtx (Pmode);
2975 tmp2 = gen_reg_rtx (Pmode);
2976 tmp3 = gen_reg_rtx (Pmode);
2977 mem = gen_rtx_MEM (Pmode, tmp1);
2978 RTX_UNCHANGING_P (mem) = 1;
2980 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
2981 emit_move_insn (tmp1, tempLR);
2982 emit_move_insn (tmp2, mem);
2983 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2984 last = emit_move_insn (got, tmp3);
2985 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2987 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2989 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2995 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2997 r3 = gen_rtx_REG (Pmode, 3);
2999 insn = gen_tls_gd_64 (r3, got, addr);
3001 insn = gen_tls_gd_32 (r3, got, addr);
3004 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3005 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3006 insn = emit_call_insn (insn);
3007 CONST_OR_PURE_CALL_P (insn) = 1;
3008 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3009 insn = get_insns ();
3011 emit_libcall_block (insn, dest, r3, addr);
3013 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3015 r3 = gen_rtx_REG (Pmode, 3);
3017 insn = gen_tls_ld_64 (r3, got);
3019 insn = gen_tls_ld_32 (r3, got);
3022 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3023 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3024 insn = emit_call_insn (insn);
3025 CONST_OR_PURE_CALL_P (insn) = 1;
3026 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3027 insn = get_insns ();
3029 tmp1 = gen_reg_rtx (Pmode);
3030 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3032 emit_libcall_block (insn, tmp1, r3, eqv);
3033 if (rs6000_tls_size == 16)
3036 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3038 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3040 else if (rs6000_tls_size == 32)
3042 tmp2 = gen_reg_rtx (Pmode);
3044 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3046 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3049 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3051 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3055 tmp2 = gen_reg_rtx (Pmode);
3057 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3059 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3061 insn = gen_rtx_SET (Pmode, dest,
3062 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3068 /* IE, or 64 bit offset LE. */
3069 tmp2 = gen_reg_rtx (Pmode);
3071 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3073 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3076 insn = gen_tls_tls_64 (dest, tmp2, addr);
3078 insn = gen_tls_tls_32 (dest, tmp2, addr);
3086 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3087 instruction definitions. */
3090 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3092 return RS6000_SYMBOL_REF_TLS_P (x);
3095 /* Return 1 if X contains a thread-local symbol. */
3098 rs6000_tls_referenced_p (rtx x)
3100 if (! TARGET_HAVE_TLS)
3103 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3106 /* Return 1 if *X is a thread-local symbol. This is the same as
3107 rs6000_tls_symbol_ref except for the type of the unused argument. */
3110 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3112 return RS6000_SYMBOL_REF_TLS_P (*x);
3115 /* The convention appears to be to define this wherever it is used.
3116 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3117 is now used here. */
3118 #ifndef REG_MODE_OK_FOR_BASE_P
3119 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3122 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3123 replace the input X, or the original X if no replacement is called for.
3124 The output parameter *WIN is 1 if the calling macro should goto WIN,
3127 For RS/6000, we wish to handle large displacements off a base
3128 register by splitting the addend across an addiu/addis and the mem insn.
3129 This cuts number of extra insns needed from 3 to 1.
3131 On Darwin, we use this to generate code for floating point constants.
3132 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3133 The Darwin code is inside #if TARGET_MACHO because only then is
3134 machopic_function_base_name() defined. */
3136 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3137 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3139 /* We must recognize output that we have already generated ourselves. */
3140 if (GET_CODE (x) == PLUS
3141 && GET_CODE (XEXP (x, 0)) == PLUS
3142 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3143 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3144 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3146 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3147 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3148 opnum, (enum reload_type)type);
3154 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3155 && GET_CODE (x) == LO_SUM
3156 && GET_CODE (XEXP (x, 0)) == PLUS
3157 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3158 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3159 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3160 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3161 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3162 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3163 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3165 /* Result of previous invocation of this function on Darwin
3166 floating point constant. */
3167 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3168 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3169 opnum, (enum reload_type)type);
3175 /* Force ld/std non-word aligned offset into base register by wrapping
3177 if (GET_CODE (x) == PLUS
3178 && GET_CODE (XEXP (x, 0)) == REG
3179 && REGNO (XEXP (x, 0)) < 32
3180 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3181 && GET_CODE (XEXP (x, 1)) == CONST_INT
3182 && (INTVAL (XEXP (x, 1)) & 3) != 0
3183 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3184 && TARGET_POWERPC64)
3186 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3187 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3188 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3189 opnum, (enum reload_type) type);
3194 if (GET_CODE (x) == PLUS
3195 && GET_CODE (XEXP (x, 0)) == REG
3196 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3197 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3198 && GET_CODE (XEXP (x, 1)) == CONST_INT
3199 && !SPE_VECTOR_MODE (mode)
3200 && !ALTIVEC_VECTOR_MODE (mode))
3202 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3203 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3205 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3207 /* Check for 32-bit overflow. */
3208 if (high + low != val)
3214 /* Reload the high part into a base reg; leave the low part
3215 in the mem directly. */
3217 x = gen_rtx_PLUS (GET_MODE (x),
3218 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3222 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3223 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3224 opnum, (enum reload_type)type);
3230 if (GET_CODE (x) == SYMBOL_REF
3231 && DEFAULT_ABI == ABI_DARWIN
3232 && !ALTIVEC_VECTOR_MODE (mode)
3233 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3234 /* Don't do this for TFmode, since the result isn't offsettable. */
3239 rtx offset = gen_rtx_CONST (Pmode,
3240 gen_rtx_MINUS (Pmode, x,
3241 gen_rtx_SYMBOL_REF (Pmode,
3242 machopic_function_base_name ())));
3243 x = gen_rtx_LO_SUM (GET_MODE (x),
3244 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3245 gen_rtx_HIGH (Pmode, offset)), offset);
3248 x = gen_rtx_LO_SUM (GET_MODE (x),
3249 gen_rtx_HIGH (Pmode, x), x);
3251 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3252 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3253 opnum, (enum reload_type)type);
3260 && constant_pool_expr_p (x)
3261 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3263 (x) = create_TOC_reference (x);
3271 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3272 that is a valid memory address for an instruction.
3273 The MODE argument is the machine mode for the MEM expression
3274 that wants to use this address.
3276 On the RS/6000, there are four valid address: a SYMBOL_REF that
3277 refers to a constant pool entry of an address (or the sum of it
3278 plus a constant), a short (16-bit signed) constant plus a register,
3279 the sum of two registers, or a register indirect, possibly with an
3280 auto-increment. For DFmode and DImode with a constant plus register,
3281 we must ensure that both words are addressable or PowerPC64 with offset
3284 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3285 32-bit DImode, TImode), indexed addressing cannot be used because
3286 adjacent memory cells are accessed by adding word-sized offsets
3287 during assembly output. */
3289 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3291 if (RS6000_SYMBOL_REF_TLS_P (x))
3293 if (legitimate_indirect_address_p (x, reg_ok_strict))
3295 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3296 && !ALTIVEC_VECTOR_MODE (mode)
3297 && !SPE_VECTOR_MODE (mode)
3299 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3301 if (legitimate_small_data_p (mode, x))
3303 if (legitimate_constant_pool_address_p (x))
3305 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3307 && GET_CODE (x) == PLUS
3308 && GET_CODE (XEXP (x, 0)) == REG
3309 && (XEXP (x, 0) == virtual_stack_vars_rtx
3310 || XEXP (x, 0) == arg_pointer_rtx)
3311 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3313 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3316 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3318 || (mode != DFmode && mode != TFmode))
3319 && (TARGET_POWERPC64 || mode != DImode)
3320 && legitimate_indexed_address_p (x, reg_ok_strict))
3322 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3327 /* Go to LABEL if ADDR (a legitimate address expression)
3328 has an effect that depends on the machine mode it is used for.
3330 On the RS/6000 this is true of all integral offsets (since AltiVec
3331 modes don't allow them) or is a pre-increment or decrement.
3333 ??? Except that due to conceptual problems in offsettable_address_p
3334 we can't really report the problems of integral offsets. So leave
3335 this assuming that the adjustable offset must be valid for the
3336 sub-words of a TFmode operand, which is what we had before. */
3339 rs6000_mode_dependent_address (rtx addr)
3341 switch (GET_CODE (addr))
3344 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3346 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3347 return val + 12 + 0x8000 >= 0x10000;
3356 return TARGET_UPDATE;
3365 /* Try to output insns to set TARGET equal to the constant C if it can
3366 be done in less than N insns. Do all computations in MODE.
3367 Returns the place where the output has been placed if it can be
3368 done and the insns have been emitted. If it would take more than N
3369 insns, zero is returned and no insns and emitted. */
3372 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3373 rtx source, int n ATTRIBUTE_UNUSED)
3375 rtx result, insn, set;
3376 HOST_WIDE_INT c0, c1;
3378 if (mode == QImode || mode == HImode)
3381 dest = gen_reg_rtx (mode);
3382 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3385 else if (mode == SImode)
3387 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3389 emit_insn (gen_rtx_SET (VOIDmode, result,
3390 GEN_INT (INTVAL (source)
3391 & (~ (HOST_WIDE_INT) 0xffff))));
3392 emit_insn (gen_rtx_SET (VOIDmode, dest,
3393 gen_rtx_IOR (SImode, result,
3394 GEN_INT (INTVAL (source) & 0xffff))));
3397 else if (mode == DImode)
3399 if (GET_CODE (source) == CONST_INT)
3401 c0 = INTVAL (source);
3404 else if (GET_CODE (source) == CONST_DOUBLE)
3406 #if HOST_BITS_PER_WIDE_INT >= 64
3407 c0 = CONST_DOUBLE_LOW (source);
3410 c0 = CONST_DOUBLE_LOW (source);
3411 c1 = CONST_DOUBLE_HIGH (source);
3417 result = rs6000_emit_set_long_const (dest, c0, c1);
3422 insn = get_last_insn ();
3423 set = single_set (insn);
3424 if (! CONSTANT_P (SET_SRC (set)))
3425 set_unique_reg_note (insn, REG_EQUAL, source);
3430 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3431 fall back to a straight forward decomposition. We do this to avoid
3432 exponential run times encountered when looking for longer sequences
3433 with rs6000_emit_set_const. */
3435 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3437 if (!TARGET_POWERPC64)
3439 rtx operand1, operand2;
3441 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3443 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3445 emit_move_insn (operand1, GEN_INT (c1));
3446 emit_move_insn (operand2, GEN_INT (c2));
3450 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3453 ud2 = (c1 & 0xffff0000) >> 16;
3454 #if HOST_BITS_PER_WIDE_INT >= 64
3458 ud4 = (c2 & 0xffff0000) >> 16;
3460 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3461 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3464 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3466 emit_move_insn (dest, GEN_INT (ud1));
3469 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3470 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3473 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3476 emit_move_insn (dest, GEN_INT (ud2 << 16));
3478 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3480 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3481 || (ud4 == 0 && ! (ud3 & 0x8000)))
3484 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3487 emit_move_insn (dest, GEN_INT (ud3 << 16));
3490 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3491 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3493 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3498 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3501 emit_move_insn (dest, GEN_INT (ud4 << 16));
3504 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3506 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3508 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3509 GEN_INT (ud2 << 16)));
3511 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3517 /* Emit a move from SOURCE to DEST in mode MODE. */
3519 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3523 operands[1] = source;
3525 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3526 if (GET_CODE (operands[1]) == CONST_DOUBLE
3527 && ! FLOAT_MODE_P (mode)
3528 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3530 /* FIXME. This should never happen. */
3531 /* Since it seems that it does, do the safe thing and convert
3533 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3535 if (GET_CODE (operands[1]) == CONST_DOUBLE
3536 && ! FLOAT_MODE_P (mode)
3537 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3538 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3539 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3540 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3543 /* Check if GCC is setting up a block move that will end up using FP
3544 registers as temporaries. We must make sure this is acceptable. */
3545 if (GET_CODE (operands[0]) == MEM
3546 && GET_CODE (operands[1]) == MEM
3548 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3549 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3550 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3551 ? 32 : MEM_ALIGN (operands[0])))
3552 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3554 : MEM_ALIGN (operands[1]))))
3555 && ! MEM_VOLATILE_P (operands [0])
3556 && ! MEM_VOLATILE_P (operands [1]))
3558 emit_move_insn (adjust_address (operands[0], SImode, 0),
3559 adjust_address (operands[1], SImode, 0));
3560 emit_move_insn (adjust_address (operands[0], SImode, 4),
3561 adjust_address (operands[1], SImode, 4));
3565 if (!no_new_pseudos)
3567 if (GET_CODE (operands[1]) == MEM && optimize > 0
3568 && (mode == QImode || mode == HImode || mode == SImode)
3569 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3571 rtx reg = gen_reg_rtx (word_mode);
3573 emit_insn (gen_rtx_SET (word_mode, reg,
3574 gen_rtx_ZERO_EXTEND (word_mode,
3576 operands[1] = gen_lowpart (mode, reg);
3578 if (GET_CODE (operands[0]) != REG)
3579 operands[1] = force_reg (mode, operands[1]);
3582 if (mode == SFmode && ! TARGET_POWERPC
3583 && TARGET_HARD_FLOAT && TARGET_FPRS
3584 && GET_CODE (operands[0]) == MEM)
3588 if (reload_in_progress || reload_completed)
3589 regnum = true_regnum (operands[1]);
3590 else if (GET_CODE (operands[1]) == REG)
3591 regnum = REGNO (operands[1]);
3595 /* If operands[1] is a register, on POWER it may have
3596 double-precision data in it, so truncate it to single
3598 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3601 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3602 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3603 operands[1] = newreg;
3607 /* Recognize the case where operand[1] is a reference to thread-local
3608 data and load its address to a register. */
3609 if (GET_CODE (operands[1]) == SYMBOL_REF)
3611 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3613 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3616 /* Handle the case where reload calls us with an invalid address. */
3617 if (reload_in_progress && mode == Pmode
3618 && (! general_operand (operands[1], mode)
3619 || ! nonimmediate_operand (operands[0], mode)))
3622 /* Handle the case of CONSTANT_P_RTX. */
3623 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3626 /* 128-bit constant floating-point values on Darwin should really be
3627 loaded as two parts. */
3628 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3629 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3630 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3632 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3633 know how to get a DFmode SUBREG of a TFmode. */
3634 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3635 simplify_gen_subreg (DImode, operands[1], mode, 0),
3637 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3638 GET_MODE_SIZE (DImode)),
3639 simplify_gen_subreg (DImode, operands[1], mode,
3640 GET_MODE_SIZE (DImode)),
3645 /* FIXME: In the long term, this switch statement should go away
3646 and be replaced by a sequence of tests based on things like
3652 if (CONSTANT_P (operands[1])
3653 && GET_CODE (operands[1]) != CONST_INT)
3654 operands[1] = force_const_mem (mode, operands[1]);
3660 if (CONSTANT_P (operands[1])
3661 && ! easy_fp_constant (operands[1], mode))
3662 operands[1] = force_const_mem (mode, operands[1]);
3673 if (CONSTANT_P (operands[1])
3674 && !easy_vector_constant (operands[1], mode))
3675 operands[1] = force_const_mem (mode, operands[1]);
3680 /* Use default pattern for address of ELF small data */
3683 && DEFAULT_ABI == ABI_V4
3684 && (GET_CODE (operands[1]) == SYMBOL_REF
3685 || GET_CODE (operands[1]) == CONST)
3686 && small_data_operand (operands[1], mode))
3688 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3692 if (DEFAULT_ABI == ABI_V4
3693 && mode == Pmode && mode == SImode
3694 && flag_pic == 1 && got_operand (operands[1], mode))
3696 emit_insn (gen_movsi_got (operands[0], operands[1]));
3700 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3704 && CONSTANT_P (operands[1])
3705 && GET_CODE (operands[1]) != HIGH
3706 && GET_CODE (operands[1]) != CONST_INT)
3708 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3710 /* If this is a function address on -mcall-aixdesc,
3711 convert it to the address of the descriptor. */
3712 if (DEFAULT_ABI == ABI_AIX
3713 && GET_CODE (operands[1]) == SYMBOL_REF
3714 && XSTR (operands[1], 0)[0] == '.')
3716 const char *name = XSTR (operands[1], 0);
3718 while (*name == '.')
3720 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3721 CONSTANT_POOL_ADDRESS_P (new_ref)
3722 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3723 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3724 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3725 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3726 operands[1] = new_ref;
3729 if (DEFAULT_ABI == ABI_DARWIN)
3732 if (MACHO_DYNAMIC_NO_PIC_P)
3734 /* Take care of any required data indirection. */
3735 operands[1] = rs6000_machopic_legitimize_pic_address (
3736 operands[1], mode, operands[0]);
3737 if (operands[0] != operands[1])
3738 emit_insn (gen_rtx_SET (VOIDmode,
3739 operands[0], operands[1]));
3743 emit_insn (gen_macho_high (target, operands[1]));
3744 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3748 emit_insn (gen_elf_high (target, operands[1]));
3749 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3753 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3754 and we have put it in the TOC, we just need to make a TOC-relative
3757 && GET_CODE (operands[1]) == SYMBOL_REF
3758 && constant_pool_expr_p (operands[1])
3759 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3760 get_pool_mode (operands[1])))
3762 operands[1] = create_TOC_reference (operands[1]);
3764 else if (mode == Pmode
3765 && CONSTANT_P (operands[1])
3766 && ((GET_CODE (operands[1]) != CONST_INT
3767 && ! easy_fp_constant (operands[1], mode))
3768 || (GET_CODE (operands[1]) == CONST_INT
3769 && num_insns_constant (operands[1], mode) > 2)
3770 || (GET_CODE (operands[0]) == REG
3771 && FP_REGNO_P (REGNO (operands[0]))))
3772 && GET_CODE (operands[1]) != HIGH
3773 && ! legitimate_constant_pool_address_p (operands[1])
3774 && ! toc_relative_expr_p (operands[1]))
3776 /* Emit a USE operation so that the constant isn't deleted if
3777 expensive optimizations are turned on because nobody
3778 references it. This should only be done for operands that
3779 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3780 This should not be done for operands that contain LABEL_REFs.
3781 For now, we just handle the obvious case. */
3782 if (GET_CODE (operands[1]) != LABEL_REF)
3783 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3786 /* Darwin uses a special PIC legitimizer. */
3787 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3790 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3792 if (operands[0] != operands[1])
3793 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3798 /* If we are to limit the number of things we put in the TOC and
3799 this is a symbol plus a constant we can add in one insn,
3800 just put the symbol in the TOC and add the constant. Don't do
3801 this if reload is in progress. */
3802 if (GET_CODE (operands[1]) == CONST
3803 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3804 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3805 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3806 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3807 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3808 && ! side_effects_p (operands[0]))
3811 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3812 rtx other = XEXP (XEXP (operands[1], 0), 1);
3814 sym = force_reg (mode, sym);
3816 emit_insn (gen_addsi3 (operands[0], sym, other));
3818 emit_insn (gen_adddi3 (operands[0], sym, other));
3822 operands[1] = force_const_mem (mode, operands[1]);
3825 && constant_pool_expr_p (XEXP (operands[1], 0))
3826 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3827 get_pool_constant (XEXP (operands[1], 0)),
3828 get_pool_mode (XEXP (operands[1], 0))))
3831 = gen_rtx_MEM (mode,
3832 create_TOC_reference (XEXP (operands[1], 0)));
3833 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3834 RTX_UNCHANGING_P (operands[1]) = 1;
3840 if (GET_CODE (operands[0]) == MEM
3841 && GET_CODE (XEXP (operands[0], 0)) != REG
3842 && ! reload_in_progress)
3844 = replace_equiv_address (operands[0],
3845 copy_addr_to_reg (XEXP (operands[0], 0)));
3847 if (GET_CODE (operands[1]) == MEM
3848 && GET_CODE (XEXP (operands[1], 0)) != REG
3849 && ! reload_in_progress)
3851 = replace_equiv_address (operands[1],
3852 copy_addr_to_reg (XEXP (operands[1], 0)));
3855 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3857 gen_rtx_SET (VOIDmode,
3858 operands[0], operands[1]),
3859 gen_rtx_CLOBBER (VOIDmode,
3860 gen_rtx_SCRATCH (SImode)))));
3869 /* Above, we may have called force_const_mem which may have returned
3870 an invalid address. If we can, fix this up; otherwise, reload will
3871 have to deal with it. */
3872 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3873 operands[1] = validize_mem (operands[1]);
3876 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3879 /* Nonzero if we can use a floating-point register to pass this arg. */
3880 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3881 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3882 && (CUM)->fregno <= FP_ARG_MAX_REG \
3883 && TARGET_HARD_FLOAT && TARGET_FPRS)
3885 /* Nonzero if we can use an AltiVec register to pass this arg. */
3886 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3887 (ALTIVEC_VECTOR_MODE (MODE) \
3888 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3889 && TARGET_ALTIVEC_ABI \
3892 /* Return a nonzero value to say to return the function value in
3893 memory, just as large structures are always returned. TYPE will be
3894 the data type of the value, and FNTYPE will be the type of the
3895 function doing the returning, or @code{NULL} for libcalls.
3897 The AIX ABI for the RS/6000 specifies that all structures are
3898 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3899 specifies that structures <= 8 bytes are returned in r3/r4, but a
3900 draft put them in memory, and GCC used to implement the draft
3901 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3902 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3903 compatibility can change DRAFT_V4_STRUCT_RET to override the
3904 default, and -m switches get the final word. See
3905 rs6000_override_options for more details.
3907 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3908 long double support is enabled. These values are returned in memory.
3910 int_size_in_bytes returns -1 for variable size objects, which go in
3911 memory always. The cast to unsigned makes -1 > 8. */
3914 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3916 if (AGGREGATE_TYPE_P (type)
3917 && (TARGET_AIX_STRUCT_RET
3918 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3920 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3925 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3926 for a call to a function whose data type is FNTYPE.
3927 For a library call, FNTYPE is 0.
3929 For incoming args we set the number of arguments in the prototype large
3930 so we never return a PARALLEL. */
3933 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3934 rtx libname ATTRIBUTE_UNUSED, int incoming,
3935 int libcall, int n_named_args)
3937 static CUMULATIVE_ARGS zero_cumulative;
3939 *cum = zero_cumulative;
3941 cum->fregno = FP_ARG_MIN_REG;
3942 cum->vregno = ALTIVEC_ARG_MIN_REG;
3943 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3944 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3945 ? CALL_LIBCALL : CALL_NORMAL);
3946 cum->sysv_gregno = GP_ARG_MIN_REG;
3947 cum->stdarg = fntype
3948 && (TYPE_ARG_TYPES (fntype) != 0
3949 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3950 != void_type_node));
3952 cum->nargs_prototype = 0;
3953 if (incoming || cum->prototype)
3954 cum->nargs_prototype = n_named_args;
3956 /* Check for a longcall attribute. */
3957 if ((!fntype && rs6000_default_long_calls)
3959 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3960 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
3961 cum->call_cookie |= CALL_LONG;
3963 if (TARGET_DEBUG_ARG)
3965 fprintf (stderr, "\ninit_cumulative_args:");
3968 tree ret_type = TREE_TYPE (fntype);
3969 fprintf (stderr, " ret code = %s,",
3970 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3973 if (cum->call_cookie & CALL_LONG)
3974 fprintf (stderr, " longcall,");
3976 fprintf (stderr, " proto = %d, nargs = %d\n",
3977 cum->prototype, cum->nargs_prototype);
3982 && TARGET_ALTIVEC_ABI
3983 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
3985 error ("Cannot return value in vector register because"
3986 " altivec instructions are disabled, use -maltivec"
3987 " to enable them.");
3991 /* If defined, a C expression which determines whether, and in which
3992 direction, to pad out an argument with extra space. The value
3993 should be of type `enum direction': either `upward' to pad above
3994 the argument, `downward' to pad below, or `none' to inhibit
3997 For the AIX ABI structs are always stored left shifted in their
4001 function_arg_padding (enum machine_mode mode, tree type)
4003 #ifndef AGGREGATE_PADDING_FIXED
4004 #define AGGREGATE_PADDING_FIXED 0
4006 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4007 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4010 if (!AGGREGATE_PADDING_FIXED)
4012 /* GCC used to pass structures of the same size as integer types as
4013 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4014 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4015 passed padded downward, except that -mstrict-align further
4016 muddied the water in that multi-component structures of 2 and 4
4017 bytes in size were passed padded upward.
4019 The following arranges for best compatibility with previous
4020 versions of gcc, but removes the -mstrict-align dependency. */
4021 if (BYTES_BIG_ENDIAN)
4023 HOST_WIDE_INT size = 0;
4025 if (mode == BLKmode)
4027 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4028 size = int_size_in_bytes (type);
4031 size = GET_MODE_SIZE (mode);
4033 if (size == 1 || size == 2 || size == 4)
4039 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4041 if (type != 0 && AGGREGATE_TYPE_P (type))
4045 /* Fall back to the default. */
4046 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4049 /* If defined, a C expression that gives the alignment boundary, in bits,
4050 of an argument with the specified mode and type. If it is not defined,
4051 PARM_BOUNDARY is used for all arguments.
4053 V.4 wants long longs to be double word aligned. */
4056 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4058 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4060 else if (SPE_VECTOR_MODE (mode))
4062 else if (ALTIVEC_VECTOR_MODE (mode))
4065 return PARM_BOUNDARY;
4068 /* Compute the size (in words) of a function argument. */
4070 static unsigned long
4071 rs6000_arg_size (enum machine_mode mode, tree type)
4075 if (mode != BLKmode)
4076 size = GET_MODE_SIZE (mode);
4078 size = int_size_in_bytes (type);
4081 return (size + 3) >> 2;
4083 return (size + 7) >> 3;
4086 /* Update the data in CUM to advance over an argument
4087 of mode MODE and data type TYPE.
4088 (TYPE is null for libcalls where that information may not be available.)
4090 Note that for args passed by reference, function_arg will be called
4091 with MODE and TYPE set to that of the pointer to the arg, not the arg
4095 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4096 tree type, int named)
4098 cum->nargs_prototype--;
4100 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4104 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4107 if (!TARGET_ALTIVEC)
4108 error ("Cannot pass argument in vector register because"
4109 " altivec instructions are disabled, use -maltivec"
4110 " to enable them.");
4112 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4113 even if it is going to be passed in a vector register.
4114 Darwin does the same for variable-argument functions. */
4115 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4116 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4126 /* Vector parameters must be 16-byte aligned. This places
4127 them at 2 mod 4 in terms of words in 32-bit mode, since
4128 the parameter save area starts at offset 24 from the
4129 stack. In 64-bit mode, they just have to start on an
4130 even word, since the parameter save area is 16-byte
4131 aligned. Space for GPRs is reserved even if the argument
4132 will be passed in memory. */
4134 align = (2 - cum->words) & 3;
4136 align = cum->words & 1;
4137 cum->words += align + rs6000_arg_size (mode, type);
4139 if (TARGET_DEBUG_ARG)
4141 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4143 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4144 cum->nargs_prototype, cum->prototype,
4145 GET_MODE_NAME (mode));
4149 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4151 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4153 else if (DEFAULT_ABI == ABI_V4)
4155 if (TARGET_HARD_FLOAT && TARGET_FPRS
4156 && (mode == SFmode || mode == DFmode))
4158 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4163 cum->words += cum->words & 1;
4164 cum->words += rs6000_arg_size (mode, type);
4169 int n_words = rs6000_arg_size (mode, type);
4170 int gregno = cum->sysv_gregno;
4172 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4173 (r7,r8) or (r9,r10). As does any other 2 word item such
4174 as complex int due to a historical mistake. */
4176 gregno += (1 - gregno) & 1;
4178 /* Multi-reg args are not split between registers and stack. */
4179 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4181 /* Long long and SPE vectors are aligned on the stack.
4182 So are other 2 word items such as complex int due to
4183 a historical mistake. */
4185 cum->words += cum->words & 1;
4186 cum->words += n_words;
4189 /* Note: continuing to accumulate gregno past when we've started
4190 spilling to the stack indicates the fact that we've started
4191 spilling to the stack to expand_builtin_saveregs. */
4192 cum->sysv_gregno = gregno + n_words;
4195 if (TARGET_DEBUG_ARG)
4197 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4198 cum->words, cum->fregno);
4199 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4200 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4201 fprintf (stderr, "mode = %4s, named = %d\n",
4202 GET_MODE_NAME (mode), named);
4207 int n_words = rs6000_arg_size (mode, type);
4208 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4210 /* The simple alignment calculation here works because
4211 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4212 If we ever want to handle alignments larger than 8 bytes for
4213 32-bit or 16 bytes for 64-bit, then we'll need to take into
4214 account the offset to the start of the parm save area. */
4215 align &= cum->words;
4216 cum->words += align + n_words;
4218 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4219 && TARGET_HARD_FLOAT && TARGET_FPRS)
4220 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4222 if (TARGET_DEBUG_ARG)
4224 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4225 cum->words, cum->fregno);
4226 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4227 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4228 fprintf (stderr, "named = %d, align = %d\n", named, align);
4233 /* Determine where to put a SIMD argument on the SPE. */
4236 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4241 int gregno = cum->sysv_gregno;
4242 int n_words = rs6000_arg_size (mode, type);
4244 /* SPE vectors are put in odd registers. */
4245 if (n_words == 2 && (gregno & 1) == 0)
4248 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4251 enum machine_mode m = SImode;
4253 r1 = gen_rtx_REG (m, gregno);
4254 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4255 r2 = gen_rtx_REG (m, gregno + 1);
4256 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4257 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4264 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4265 return gen_rtx_REG (mode, cum->sysv_gregno);
4271 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4274 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4278 rtx rvec[GP_ARG_NUM_REG + 1];
4280 if (align_words >= GP_ARG_NUM_REG)
4283 n_units = rs6000_arg_size (mode, type);
4285 /* Optimize the simple case where the arg fits in one gpr, except in
4286 the case of BLKmode due to assign_parms assuming that registers are
4287 BITS_PER_WORD wide. */
4289 || (n_units == 1 && mode != BLKmode))
4290 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4293 if (align_words + n_units > GP_ARG_NUM_REG)
4294 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4295 using a magic NULL_RTX component.
4296 FIXME: This is not strictly correct. Only some of the arg
4297 belongs in memory, not all of it. However, there isn't any way
4298 to do this currently, apart from building rtx descriptions for
4299 the pieces of memory we want stored. Due to bugs in the generic
4300 code we can't use the normal function_arg_partial_nregs scheme
4301 with the PARALLEL arg description we emit here.
4302 In any case, the code to store the whole arg to memory is often
4303 more efficient than code to store pieces, and we know that space
4304 is available in the right place for the whole arg. */
4305 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4310 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4311 rtx off = GEN_INT (i++ * 4);
4312 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4314 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4316 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4319 /* Determine where to put an argument to a function.
4320 Value is zero to push the argument on the stack,
4321 or a hard register in which to store the argument.
4323 MODE is the argument's machine mode.
4324 TYPE is the data type of the argument (as a tree).
4325 This is null for libcalls where that information may
4327 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4328 the preceding args and about the function being called.
4329 NAMED is nonzero if this argument is a named parameter
4330 (otherwise it is an extra parameter matching an ellipsis).
4332 On RS/6000 the first eight words of non-FP are normally in registers
4333 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4334 Under V.4, the first 8 FP args are in registers.
4336 If this is floating-point and no prototype is specified, we use
4337 both an FP and integer register (or possibly FP reg and stack). Library
4338 functions (when CALL_LIBCALL is set) always have the proper types for args,
4339 so we can pass the FP value just in one register. emit_library_function
4340 doesn't support PARALLEL anyway.
4342 Note that for args passed by reference, function_arg will be called
4343 with MODE and TYPE set to that of the pointer to the arg, not the arg
4347 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4348 tree type, int named)
4350 enum rs6000_abi abi = DEFAULT_ABI;
4352 /* Return a marker to indicate whether CR1 needs to set or clear the
4353 bit that V.4 uses to say fp args were passed in registers.
4354 Assume that we don't need the marker for software floating point,
4355 or compiler generated library calls. */
4356 if (mode == VOIDmode)
4359 && (cum->call_cookie & CALL_LIBCALL) == 0
4361 || (cum->nargs_prototype < 0
4362 && (cum->prototype || TARGET_NO_PROTOTYPE))))
4364 /* For the SPE, we need to crxor CR6 always. */
4366 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4367 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4368 return GEN_INT (cum->call_cookie
4369 | ((cum->fregno == FP_ARG_MIN_REG)
4370 ? CALL_V4_SET_FP_ARGS
4371 : CALL_V4_CLEAR_FP_ARGS));
4374 return GEN_INT (cum->call_cookie);
4377 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4378 if (TARGET_64BIT && ! cum->prototype)
4380 /* Vector parameters get passed in vector register
4381 and also in GPRs or memory, in absence of prototype. */
4384 align_words = (cum->words + 1) & ~1;
4386 if (align_words >= GP_ARG_NUM_REG)
4392 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4394 return gen_rtx_PARALLEL (mode,
4396 gen_rtx_EXPR_LIST (VOIDmode,
4398 gen_rtx_EXPR_LIST (VOIDmode,
4399 gen_rtx_REG (mode, cum->vregno),
4403 return gen_rtx_REG (mode, cum->vregno);
4404 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4406 if (named || abi == ABI_V4)
4410 /* Vector parameters to varargs functions under AIX or Darwin
4411 get passed in memory and possibly also in GPRs. */
4412 int align, align_words, n_words;
4413 enum machine_mode part_mode;
4415 /* Vector parameters must be 16-byte aligned. This places them at
4416 2 mod 4 in terms of words in 32-bit mode, since the parameter
4417 save area starts at offset 24 from the stack. In 64-bit mode,
4418 they just have to start on an even word, since the parameter
4419 save area is 16-byte aligned. */
4421 align = (2 - cum->words) & 3;
4423 align = cum->words & 1;
4424 align_words = cum->words + align;
4426 /* Out of registers? Memory, then. */
4427 if (align_words >= GP_ARG_NUM_REG)
4430 if (TARGET_32BIT && TARGET_POWERPC64)
4431 return rs6000_mixed_function_arg (mode, type, align_words);
4433 /* The vector value goes in GPRs. Only the part of the
4434 value in GPRs is reported here. */
4436 n_words = rs6000_arg_size (mode, type);
4437 if (align_words + n_words > GP_ARG_NUM_REG)
4438 /* Fortunately, there are only two possibilities, the value
4439 is either wholly in GPRs or half in GPRs and half not. */
4442 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4445 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4446 return rs6000_spe_function_arg (cum, mode, type);
4447 else if (abi == ABI_V4)
4449 if (TARGET_HARD_FLOAT && TARGET_FPRS
4450 && (mode == SFmode || mode == DFmode))
4452 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4453 return gen_rtx_REG (mode, cum->fregno);
4459 int n_words = rs6000_arg_size (mode, type);
4460 int gregno = cum->sysv_gregno;
4462 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4463 (r7,r8) or (r9,r10). As does any other 2 word item such
4464 as complex int due to a historical mistake. */
4466 gregno += (1 - gregno) & 1;
4468 /* Multi-reg args are not split between registers and stack. */
4469 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4472 if (TARGET_32BIT && TARGET_POWERPC64)
4473 return rs6000_mixed_function_arg (mode, type,
4474 gregno - GP_ARG_MIN_REG);
4475 return gen_rtx_REG (mode, gregno);
4480 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4481 int align_words = cum->words + (cum->words & align);
4483 if (USE_FP_FOR_ARG_P (cum, mode, type))
4485 rtx rvec[GP_ARG_NUM_REG + 1];
4489 enum machine_mode fmode = mode;
4490 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
4492 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
4494 /* Currently, we only ever need one reg here because complex
4495 doubles are split. */
4496 if (cum->fregno != FP_ARG_MAX_REG || fmode != TFmode)
4499 /* Long double split over regs and memory. */
4503 /* Do we also need to pass this arg in the parameter save
4506 && (cum->nargs_prototype <= 0
4507 || (DEFAULT_ABI == ABI_AIX
4509 && align_words >= GP_ARG_NUM_REG)));
4511 if (!needs_psave && mode == fmode)
4512 return gen_rtx_REG (fmode, cum->fregno);
4517 /* Describe the part that goes in gprs or the stack.
4518 This piece must come first, before the fprs. */
4519 if (align_words < GP_ARG_NUM_REG)
4521 unsigned long n_words = rs6000_arg_size (mode, type);
4523 if (align_words + n_words > GP_ARG_NUM_REG
4524 || (TARGET_32BIT && TARGET_POWERPC64))
4526 /* If this is partially on the stack, then we only
4527 include the portion actually in registers here. */
4528 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
4532 r = gen_rtx_REG (rmode,
4533 GP_ARG_MIN_REG + align_words);
4534 off = GEN_INT (k * GET_MODE_SIZE (rmode));
4535 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4537 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
4541 /* The whole arg fits in gprs. */
4542 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4543 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
4547 /* It's entirely in memory. */
4548 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4551 /* Describe where this piece goes in the fprs. */
4552 r = gen_rtx_REG (fmode, cum->fregno);
4553 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
4555 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4557 else if (align_words < GP_ARG_NUM_REG)
4559 if (TARGET_32BIT && TARGET_POWERPC64)
4560 return rs6000_mixed_function_arg (mode, type, align_words);
4562 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4569 /* For an arg passed partly in registers and partly in memory, this is
4570 the number of registers used. For args passed entirely in registers
4571 or entirely in memory, zero. When an arg is described by a PARALLEL,
4572 perhaps using more than one register type, this function returns the
4573 number of registers used by the first element of the PARALLEL. */
4576 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4577 tree type, int named)
4584 if (DEFAULT_ABI == ABI_V4)
4587 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
4588 && cum->nargs_prototype >= 0)
4591 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4592 parm_offset = TARGET_32BIT ? 2 : 0;
4593 align_words = cum->words + ((parm_offset - cum->words) & align);
4595 if (USE_FP_FOR_ARG_P (cum, mode, type)
4596 /* If we are passing this arg in gprs as well, then this function
4597 should return the number of gprs (or memory) partially passed,
4598 *not* the number of fprs. */
4600 && (cum->nargs_prototype <= 0
4601 || (DEFAULT_ABI == ABI_AIX
4603 && align_words >= GP_ARG_NUM_REG))))
4605 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
4606 ret = FP_ARG_MAX_REG + 1 - cum->fregno;
4607 else if (cum->nargs_prototype >= 0)
4611 if (align_words < GP_ARG_NUM_REG
4612 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
4613 ret = GP_ARG_NUM_REG - align_words;
4615 if (ret != 0 && TARGET_DEBUG_ARG)
4616 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4621 /* A C expression that indicates when an argument must be passed by
4622 reference. If nonzero for an argument, a copy of that argument is
4623 made in memory and a pointer to the argument is passed instead of
4624 the argument itself. The pointer is passed in whatever way is
4625 appropriate for passing a pointer to that type.
4627 Under V.4, aggregates and long double are passed by reference.
4629 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
4630 reference unless the AltiVec vector extension ABI is in force.
4632 As an extension to all ABIs, variable sized types are passed by
4636 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4637 enum machine_mode mode ATTRIBUTE_UNUSED,
4638 tree type, int named ATTRIBUTE_UNUSED)
4640 if ((DEFAULT_ABI == ABI_V4
4641 && ((type && AGGREGATE_TYPE_P (type))
4643 || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4644 || (type && int_size_in_bytes (type) < 0))
4646 if (TARGET_DEBUG_ARG)
4647 fprintf (stderr, "function_arg_pass_by_reference\n");
4655 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4658 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4663 for (i = 0; i < nregs; i++)
4665 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4666 if (reload_completed)
4668 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4671 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4672 i * GET_MODE_SIZE(reg_mode));
4675 tem = replace_equiv_address (tem, XEXP (tem, 0));
4677 if (tem == NULL_RTX)
4680 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4685 /* Perform any needed actions needed for a function that is receiving a
4686 variable number of arguments.
4690 MODE and TYPE are the mode and type of the current parameter.
4692 PRETEND_SIZE is a variable that should be set to the amount of stack
4693 that must be pushed by the prolog to pretend that our caller pushed
4696 Normally, this macro will push all remaining incoming registers on the
4697 stack and set PRETEND_SIZE to the length of the registers pushed. */
4700 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4701 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4703 CUMULATIVE_ARGS next_cum;
4704 int reg_size = TARGET_32BIT ? 4 : 8;
4705 rtx save_area = NULL_RTX, mem;
4706 int first_reg_offset, set;
4708 /* Skip the last named argument. */
4710 function_arg_advance (&next_cum, mode, type, 1);
4712 if (DEFAULT_ABI == ABI_V4)
4714 /* Indicate to allocate space on the stack for varargs save area. */
4715 cfun->machine->sysv_varargs_p = 1;
4717 save_area = plus_constant (virtual_stack_vars_rtx,
4718 - RS6000_VARARGS_SIZE);
4720 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4724 first_reg_offset = next_cum.words;
4725 save_area = virtual_incoming_args_rtx;
4726 cfun->machine->sysv_varargs_p = 0;
4728 if (MUST_PASS_IN_STACK (mode, type))
4729 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4732 set = get_varargs_alias_set ();
4733 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4735 mem = gen_rtx_MEM (BLKmode,
4736 plus_constant (save_area,
4737 first_reg_offset * reg_size)),
4738 set_mem_alias_set (mem, set);
4739 set_mem_align (mem, BITS_PER_WORD);
4741 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4742 GP_ARG_NUM_REG - first_reg_offset);
4745 /* Save FP registers if needed. */
4746 if (DEFAULT_ABI == ABI_V4
4747 && TARGET_HARD_FLOAT && TARGET_FPRS
4749 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4751 int fregno = next_cum.fregno;
4752 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4753 rtx lab = gen_label_rtx ();
4754 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4756 emit_jump_insn (gen_rtx_SET (VOIDmode,
4758 gen_rtx_IF_THEN_ELSE (VOIDmode,
4759 gen_rtx_NE (VOIDmode, cr1,
4761 gen_rtx_LABEL_REF (VOIDmode, lab),
4764 while (fregno <= FP_ARG_V4_MAX_REG)
4766 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4767 set_mem_alias_set (mem, set);
4768 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
4769 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4778 /* Create the va_list data type. */
4781 rs6000_build_builtin_va_list (void)
4783 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4785 /* For AIX, prefer 'char *' because that's what the system
4786 header files like. */
4787 if (DEFAULT_ABI != ABI_V4)
4788 return build_pointer_type (char_type_node);
4790 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4791 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4793 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4794 unsigned_char_type_node);
4795 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4796 unsigned_char_type_node);
4797 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4799 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4800 short_unsigned_type_node);
4801 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4803 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4806 DECL_FIELD_CONTEXT (f_gpr) = record;
4807 DECL_FIELD_CONTEXT (f_fpr) = record;
4808 DECL_FIELD_CONTEXT (f_res) = record;
4809 DECL_FIELD_CONTEXT (f_ovf) = record;
4810 DECL_FIELD_CONTEXT (f_sav) = record;
4812 TREE_CHAIN (record) = type_decl;
4813 TYPE_NAME (record) = type_decl;
4814 TYPE_FIELDS (record) = f_gpr;
4815 TREE_CHAIN (f_gpr) = f_fpr;
4816 TREE_CHAIN (f_fpr) = f_res;
4817 TREE_CHAIN (f_res) = f_ovf;
4818 TREE_CHAIN (f_ovf) = f_sav;
4820 layout_type (record);
4822 /* The correct type is an array type of one element. */
4823 return build_array_type (record, build_index_type (size_zero_node));
4826 /* Implement va_start. */
4829 rs6000_va_start (tree valist, rtx nextarg)
4831 HOST_WIDE_INT words, n_gpr, n_fpr;
4832 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4833 tree gpr, fpr, ovf, sav, t;
4835 /* Only SVR4 needs something special. */
4836 if (DEFAULT_ABI != ABI_V4)
4838 std_expand_builtin_va_start (valist, nextarg);
4842 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4843 f_fpr = TREE_CHAIN (f_gpr);
4844 f_res = TREE_CHAIN (f_fpr);
4845 f_ovf = TREE_CHAIN (f_res);
4846 f_sav = TREE_CHAIN (f_ovf);
4848 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4849 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4850 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4851 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4852 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4854 /* Count number of gp and fp argument registers used. */
4855 words = current_function_args_info.words;
4856 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4857 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4859 if (TARGET_DEBUG_ARG)
4860 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4861 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4862 words, n_gpr, n_fpr);
4864 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4865 TREE_SIDE_EFFECTS (t) = 1;
4866 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4868 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4869 TREE_SIDE_EFFECTS (t) = 1;
4870 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4872 /* Find the overflow area. */
4873 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4875 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4876 build_int_2 (words * UNITS_PER_WORD, 0));
4877 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4878 TREE_SIDE_EFFECTS (t) = 1;
4879 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4881 /* Find the register save area. */
4882 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4883 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4884 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4885 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4886 TREE_SIDE_EFFECTS (t) = 1;
4887 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4890 /* Implement va_arg. */
4893 rs6000_va_arg (tree valist, tree type)
4895 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4896 tree gpr, fpr, ovf, sav, reg, t, u;
4897 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4898 rtx lab_false, lab_over, addr_rtx, r;
4901 if (DEFAULT_ABI != ABI_V4)
4903 /* Variable sized types are passed by reference, as are AltiVec
4904 vectors when 32-bit and not using the AltiVec ABI extension. */
4905 if (int_size_in_bytes (type) < 0
4907 && !TARGET_ALTIVEC_ABI
4908 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
4910 u = build_pointer_type (type);
4912 /* Args grow upward. */
4913 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4914 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4915 TREE_SIDE_EFFECTS (t) = 1;
4917 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4918 TREE_SIDE_EFFECTS (t) = 1;
4920 t = build1 (INDIRECT_REF, u, t);
4921 TREE_SIDE_EFFECTS (t) = 1;
4923 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4925 if (targetm.calls.split_complex_arg
4926 && TREE_CODE (type) == COMPLEX_TYPE)
4928 tree elem_type = TREE_TYPE (type);
4929 enum machine_mode elem_mode = TYPE_MODE (elem_type);
4930 int elem_size = GET_MODE_SIZE (elem_mode);
4932 if (elem_size < UNITS_PER_WORD)
4934 rtx real_part, imag_part, dest_real, rr;
4936 real_part = rs6000_va_arg (valist, elem_type);
4937 imag_part = rs6000_va_arg (valist, elem_type);
4939 /* We're not returning the value here, but the address.
4940 real_part and imag_part are not contiguous, and we know
4941 there is space available to pack real_part next to
4942 imag_part. float _Complex is not promoted to
4943 double _Complex by the default promotion rules that
4944 promote float to double. */
4945 if (2 * elem_size > UNITS_PER_WORD)
4948 real_part = gen_rtx_MEM (elem_mode, real_part);
4949 imag_part = gen_rtx_MEM (elem_mode, imag_part);
4951 dest_real = adjust_address (imag_part, elem_mode, -elem_size);
4952 rr = gen_reg_rtx (elem_mode);
4953 emit_move_insn (rr, real_part);
4954 emit_move_insn (dest_real, rr);
4956 return XEXP (dest_real, 0);
4960 return std_expand_builtin_va_arg (valist, type);
4963 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4964 f_fpr = TREE_CHAIN (f_gpr);
4965 f_res = TREE_CHAIN (f_fpr);
4966 f_ovf = TREE_CHAIN (f_res);
4967 f_sav = TREE_CHAIN (f_ovf);
4969 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4970 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4971 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4972 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4973 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4975 size = int_size_in_bytes (type);
4976 rsize = (size + 3) / 4;
4979 if (AGGREGATE_TYPE_P (type)
4980 || TYPE_MODE (type) == TFmode
4981 || (!TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
4983 /* Aggregates, long doubles, and AltiVec vectors are passed by
4993 else if (TARGET_HARD_FLOAT && TARGET_FPRS
4994 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
4996 /* FP args go in FP registers, if present. */
5002 if (TYPE_MODE (type) == DFmode)
5007 /* Otherwise into GP registers. */
5017 /* Pull the value out of the saved registers.... */
5019 lab_over = NULL_RTX;
5020 addr_rtx = gen_reg_rtx (Pmode);
5022 /* AltiVec vectors never go in registers when -mabi=altivec. */
5023 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5027 lab_false = gen_label_rtx ();
5028 lab_over = gen_label_rtx ();
5030 /* Long long and SPE vectors are aligned in the registers.
5031 As are any other 2 gpr item such as complex int due to a
5032 historical mistake. */
5036 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5037 build_int_2 (n_reg - 1, 0));
5038 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5039 TREE_SIDE_EFFECTS (u) = 1;
5042 emit_cmp_and_jump_insns
5043 (expand_expr (u, NULL_RTX, QImode, EXPAND_NORMAL),
5044 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
5049 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
5051 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
5052 build_int_2 (n_reg, 0));
5053 TREE_SIDE_EFFECTS (u) = 1;
5055 u = build1 (CONVERT_EXPR, integer_type_node, u);
5056 TREE_SIDE_EFFECTS (u) = 1;
5058 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
5059 TREE_SIDE_EFFECTS (u) = 1;
5061 t = build (PLUS_EXPR, ptr_type_node, t, u);
5062 TREE_SIDE_EFFECTS (t) = 1;
5064 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5066 emit_move_insn (addr_rtx, r);
5068 emit_jump_insn (gen_jump (lab_over));
5071 emit_label (lab_false);
5074 /* Ensure that we don't find any more args in regs.
5075 Alignment has taken care of the n_reg == 2 case. */
5076 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
5077 TREE_SIDE_EFFECTS (t) = 1;
5078 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5082 /* ... otherwise out of the overflow area. */
5084 /* Care for on-stack alignment if needed. */
5088 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (align - 1, 0));
5089 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align, -1));
5093 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5095 emit_move_insn (addr_rtx, r);
5097 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5098 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5099 TREE_SIDE_EFFECTS (t) = 1;
5100 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5103 emit_label (lab_over);
5107 r = gen_rtx_MEM (Pmode, addr_rtx);
5108 set_mem_alias_set (r, get_varargs_alias_set ());
5109 emit_move_insn (addr_rtx, r);
5117 #define def_builtin(MASK, NAME, TYPE, CODE) \
5119 if ((MASK) & target_flags) \
5120 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5124 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5126 static const struct builtin_description bdesc_3arg[] =
5128 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5129 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5130 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5131 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5132 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5133 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5134 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5135 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5136 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5137 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5138 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5139 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5140 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5141 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5142 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5143 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5144 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5145 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5146 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5147 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5148 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5149 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5150 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5153 /* DST operations: void foo (void *, const int, const char). */
5155 static const struct builtin_description bdesc_dst[] =
5157 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5158 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5159 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5160 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5163 /* Simple binary operations: VECc = foo (VECa, VECb). */
5165 static struct builtin_description bdesc_2arg[] =
5167 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5168 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5169 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5170 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5171 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5172 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5173 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5174 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5175 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5176 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5177 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5178 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5179 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5180 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5181 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5182 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5183 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5184 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5185 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5186 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5187 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5188 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5189 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5190 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5191 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5192 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5193 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5194 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5195 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5196 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5197 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5198 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5199 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5200 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5201 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5202 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5203 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5204 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5205 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5206 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5207 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5208 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5209 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5210 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5211 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5212 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5213 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5214 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5215 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5216 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5217 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5218 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5219 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5220 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5221 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5222 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5223 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5224 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5225 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5226 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5227 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5228 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5229 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5230 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5231 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5232 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5233 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5234 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5235 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5236 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5237 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5238 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5239 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5240 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5241 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5242 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5243 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5244 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5245 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5246 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5247 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5248 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5249 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5250 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5251 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5252 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5253 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5254 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5255 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5256 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5257 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5258 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5259 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5260 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5261 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5262 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5263 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5264 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5265 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5266 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5267 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5268 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5269 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5270 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5271 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5272 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5273 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5274 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5275 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5276 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5277 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5278 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5279 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5281 /* Place holder, leave as first spe builtin. */
5282 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5283 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5284 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5285 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5286 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5287 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5288 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5289 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5290 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5291 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5292 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5293 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5294 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5295 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5296 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5297 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5298 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5299 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5300 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5301 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5302 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5303 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5304 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5305 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5306 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5307 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5308 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5309 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5310 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5311 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5312 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5313 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5314 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5315 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5316 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5317 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5318 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5319 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5320 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5321 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5322 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5323 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5324 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5325 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5326 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5327 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5328 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5329 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5330 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5331 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5332 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5333 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5334 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5335 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5336 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5337 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5338 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5339 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5340 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5341 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5342 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5343 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5344 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5345 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5346 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5347 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5348 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5349 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5350 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5351 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5352 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5353 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5354 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5355 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5356 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5357 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5358 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5359 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5360 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5361 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5362 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5363 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5364 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5365 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5366 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5367 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5368 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5369 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5370 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5371 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5372 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5373 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5374 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5375 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5376 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5377 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5378 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5379 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5380 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5381 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5382 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5383 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5384 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5385 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5386 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5387 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5388 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5389 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5390 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5392 /* SPE binary operations expecting a 5-bit unsigned literal. */
5393 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5395 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5396 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5397 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5398 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5399 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5400 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5401 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5402 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5403 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5404 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5405 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5406 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5407 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5408 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5409 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5410 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5411 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5412 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5413 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5414 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5415 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5416 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5417 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5418 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5419 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5420 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5422 /* Place-holder. Leave as last binary SPE builtin. */
5423 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5426 /* AltiVec predicates. */
5428 struct builtin_description_predicates
5430 const unsigned int mask;
5431 const enum insn_code icode;
5433 const char *const name;
5434 const enum rs6000_builtins code;
5437 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5439 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5440 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5441 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5442 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5443 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5444 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5445 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5446 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5447 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5448 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5449 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5450 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5451 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5454 /* SPE predicates. */
5455 static struct builtin_description bdesc_spe_predicates[] =
5457 /* Place-holder. Leave as first. */
5458 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5459 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5460 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5461 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5462 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5463 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5464 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5465 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5466 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5467 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5468 /* Place-holder. Leave as last. */
5469 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5472 /* SPE evsel predicates. */
5473 static struct builtin_description bdesc_spe_evsel[] =
5475 /* Place-holder. Leave as first. */
5476 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5477 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5478 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5479 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5480 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5481 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5482 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5483 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5484 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5485 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5486 /* Place-holder. Leave as last. */
5487 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5490 /* ABS* operations. */
5492 static const struct builtin_description bdesc_abs[] =
5494 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5495 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5496 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5497 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5498 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5499 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5500 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5503 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5506 static struct builtin_description bdesc_1arg[] =
5508 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5509 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5510 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5511 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5512 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5513 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5514 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5515 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5516 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5517 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5518 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5519 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5520 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5521 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5522 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5523 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5524 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5526 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5527 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5528 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5529 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5530 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5531 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5532 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5533 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5534 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5535 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5536 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5537 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5538 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5539 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5540 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5541 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5542 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5543 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5544 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5545 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5546 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5547 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5548 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5549 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5550 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5551 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5552 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5553 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5554 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5555 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5557 /* Place-holder. Leave as last unary SPE builtin. */
5558 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5562 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5565 tree arg0 = TREE_VALUE (arglist);
5566 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5567 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5568 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5570 if (icode == CODE_FOR_nothing)
5571 /* Builtin not supported on this processor. */
5574 /* If we got invalid arguments bail out before generating bad rtl. */
5575 if (arg0 == error_mark_node)
5578 if (icode == CODE_FOR_altivec_vspltisb
5579 || icode == CODE_FOR_altivec_vspltish
5580 || icode == CODE_FOR_altivec_vspltisw
5581 || icode == CODE_FOR_spe_evsplatfi
5582 || icode == CODE_FOR_spe_evsplati)
5584 /* Only allow 5-bit *signed* literals. */
5585 if (GET_CODE (op0) != CONST_INT
5586 || INTVAL (op0) > 0x1f
5587 || INTVAL (op0) < -0x1f)
5589 error ("argument 1 must be a 5-bit signed literal");
5595 || GET_MODE (target) != tmode
5596 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5597 target = gen_reg_rtx (tmode);
5599 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5600 op0 = copy_to_mode_reg (mode0, op0);
5602 pat = GEN_FCN (icode) (target, op0);
5611 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5613 rtx pat, scratch1, scratch2;
5614 tree arg0 = TREE_VALUE (arglist);
5615 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5616 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5617 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5619 /* If we have invalid arguments, bail out before generating bad rtl. */
5620 if (arg0 == error_mark_node)
5624 || GET_MODE (target) != tmode
5625 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5626 target = gen_reg_rtx (tmode);
5628 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5629 op0 = copy_to_mode_reg (mode0, op0);
5631 scratch1 = gen_reg_rtx (mode0);
5632 scratch2 = gen_reg_rtx (mode0);
5634 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5643 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5646 tree arg0 = TREE_VALUE (arglist);
5647 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5648 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5649 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5650 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5651 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5652 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5654 if (icode == CODE_FOR_nothing)
5655 /* Builtin not supported on this processor. */
5658 /* If we got invalid arguments bail out before generating bad rtl. */
5659 if (arg0 == error_mark_node || arg1 == error_mark_node)
5662 if (icode == CODE_FOR_altivec_vcfux
5663 || icode == CODE_FOR_altivec_vcfsx
5664 || icode == CODE_FOR_altivec_vctsxs
5665 || icode == CODE_FOR_altivec_vctuxs
5666 || icode == CODE_FOR_altivec_vspltb
5667 || icode == CODE_FOR_altivec_vsplth
5668 || icode == CODE_FOR_altivec_vspltw
5669 || icode == CODE_FOR_spe_evaddiw
5670 || icode == CODE_FOR_spe_evldd
5671 || icode == CODE_FOR_spe_evldh
5672 || icode == CODE_FOR_spe_evldw
5673 || icode == CODE_FOR_spe_evlhhesplat
5674 || icode == CODE_FOR_spe_evlhhossplat
5675 || icode == CODE_FOR_spe_evlhhousplat
5676 || icode == CODE_FOR_spe_evlwhe
5677 || icode == CODE_FOR_spe_evlwhos
5678 || icode == CODE_FOR_spe_evlwhou
5679 || icode == CODE_FOR_spe_evlwhsplat
5680 || icode == CODE_FOR_spe_evlwwsplat
5681 || icode == CODE_FOR_spe_evrlwi
5682 || icode == CODE_FOR_spe_evslwi
5683 || icode == CODE_FOR_spe_evsrwis
5684 || icode == CODE_FOR_spe_evsubifw
5685 || icode == CODE_FOR_spe_evsrwiu)
5687 /* Only allow 5-bit unsigned literals. */
5689 if (TREE_CODE (arg1) != INTEGER_CST
5690 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5692 error ("argument 2 must be a 5-bit unsigned literal");
5698 || GET_MODE (target) != tmode
5699 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5700 target = gen_reg_rtx (tmode);
5702 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5703 op0 = copy_to_mode_reg (mode0, op0);
5704 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5705 op1 = copy_to_mode_reg (mode1, op1);
5707 pat = GEN_FCN (icode) (target, op0, op1);
5716 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5717 tree arglist, rtx target)
5720 tree cr6_form = TREE_VALUE (arglist);
5721 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5722 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5723 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5724 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5725 enum machine_mode tmode = SImode;
5726 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5727 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5730 if (TREE_CODE (cr6_form) != INTEGER_CST)
5732 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5736 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5741 /* If we have invalid arguments, bail out before generating bad rtl. */
5742 if (arg0 == error_mark_node || arg1 == error_mark_node)
5746 || GET_MODE (target) != tmode
5747 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5748 target = gen_reg_rtx (tmode);
5750 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5751 op0 = copy_to_mode_reg (mode0, op0);
5752 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5753 op1 = copy_to_mode_reg (mode1, op1);
5755 scratch = gen_reg_rtx (mode0);
5757 pat = GEN_FCN (icode) (scratch, op0, op1,
5758 gen_rtx (SYMBOL_REF, Pmode, opcode));
5763 /* The vec_any* and vec_all* predicates use the same opcodes for two
5764 different operations, but the bits in CR6 will be different
5765 depending on what information we want. So we have to play tricks
5766 with CR6 to get the right bits out.
5768 If you think this is disgusting, look at the specs for the
5769 AltiVec predicates. */
5771 switch (cr6_form_int)
5774 emit_insn (gen_cr6_test_for_zero (target));
5777 emit_insn (gen_cr6_test_for_zero_reverse (target));
5780 emit_insn (gen_cr6_test_for_lt (target));
5783 emit_insn (gen_cr6_test_for_lt_reverse (target));
5786 error ("argument 1 of __builtin_altivec_predicate is out of range");
5794 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5797 tree arg0 = TREE_VALUE (arglist);
5798 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5799 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5800 enum machine_mode mode0 = Pmode;
5801 enum machine_mode mode1 = Pmode;
5802 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5803 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5805 if (icode == CODE_FOR_nothing)
5806 /* Builtin not supported on this processor. */
5809 /* If we got invalid arguments bail out before generating bad rtl. */
5810 if (arg0 == error_mark_node || arg1 == error_mark_node)
5814 || GET_MODE (target) != tmode
5815 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5816 target = gen_reg_rtx (tmode);
5818 op1 = copy_to_mode_reg (mode1, op1);
5820 if (op0 == const0_rtx)
5822 addr = gen_rtx_MEM (tmode, op1);
5826 op0 = copy_to_mode_reg (mode0, op0);
5827 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5830 pat = GEN_FCN (icode) (target, addr);
5840 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
5842 tree arg0 = TREE_VALUE (arglist);
5843 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5844 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5845 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5846 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5847 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5849 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5850 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5851 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5853 /* Invalid arguments. Bail before doing anything stoopid! */
5854 if (arg0 == error_mark_node
5855 || arg1 == error_mark_node
5856 || arg2 == error_mark_node)
5859 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5860 op0 = copy_to_mode_reg (mode2, op0);
5861 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5862 op1 = copy_to_mode_reg (mode0, op1);
5863 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5864 op2 = copy_to_mode_reg (mode1, op2);
5866 pat = GEN_FCN (icode) (op1, op2, op0);
5873 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5875 tree arg0 = TREE_VALUE (arglist);
5876 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5877 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5878 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5879 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5880 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5882 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5883 enum machine_mode mode1 = Pmode;
5884 enum machine_mode mode2 = Pmode;
5886 /* Invalid arguments. Bail before doing anything stoopid! */
5887 if (arg0 == error_mark_node
5888 || arg1 == error_mark_node
5889 || arg2 == error_mark_node)
5892 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5893 op0 = copy_to_mode_reg (tmode, op0);
5895 op2 = copy_to_mode_reg (mode2, op2);
5897 if (op1 == const0_rtx)
5899 addr = gen_rtx_MEM (tmode, op2);
5903 op1 = copy_to_mode_reg (mode1, op1);
5904 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5907 pat = GEN_FCN (icode) (addr, op0);
5914 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5917 tree arg0 = TREE_VALUE (arglist);
5918 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5919 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5920 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5921 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5922 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5923 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5924 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5925 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5926 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5928 if (icode == CODE_FOR_nothing)
5929 /* Builtin not supported on this processor. */
5932 /* If we got invalid arguments bail out before generating bad rtl. */
5933 if (arg0 == error_mark_node
5934 || arg1 == error_mark_node
5935 || arg2 == error_mark_node)
5938 if (icode == CODE_FOR_altivec_vsldoi_4sf
5939 || icode == CODE_FOR_altivec_vsldoi_4si
5940 || icode == CODE_FOR_altivec_vsldoi_8hi
5941 || icode == CODE_FOR_altivec_vsldoi_16qi)
5943 /* Only allow 4-bit unsigned literals. */
5944 if (TREE_CODE (arg2) != INTEGER_CST
5945 || TREE_INT_CST_LOW (arg2) & ~0xf)
5947 error ("argument 3 must be a 4-bit unsigned literal");
5953 || GET_MODE (target) != tmode
5954 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5955 target = gen_reg_rtx (tmode);
5957 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5958 op0 = copy_to_mode_reg (mode0, op0);
5959 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5960 op1 = copy_to_mode_reg (mode1, op1);
5961 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5962 op2 = copy_to_mode_reg (mode2, op2);
5964 pat = GEN_FCN (icode) (target, op0, op1, op2);
5972 /* Expand the lvx builtins. */
5974 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5976 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5977 tree arglist = TREE_OPERAND (exp, 1);
5978 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5980 enum machine_mode tmode, mode0;
5982 enum insn_code icode;
5986 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5987 icode = CODE_FOR_altivec_lvx_16qi;
5989 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5990 icode = CODE_FOR_altivec_lvx_8hi;
5992 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5993 icode = CODE_FOR_altivec_lvx_4si;
5995 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5996 icode = CODE_FOR_altivec_lvx_4sf;
6005 arg0 = TREE_VALUE (arglist);
6006 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6007 tmode = insn_data[icode].operand[0].mode;
6008 mode0 = insn_data[icode].operand[1].mode;
6011 || GET_MODE (target) != tmode
6012 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6013 target = gen_reg_rtx (tmode);
6015 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6016 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6018 pat = GEN_FCN (icode) (target, op0);
6025 /* Expand the stvx builtins. */
6027 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6030 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6031 tree arglist = TREE_OPERAND (exp, 1);
6032 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6034 enum machine_mode mode0, mode1;
6036 enum insn_code icode;
6040 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6041 icode = CODE_FOR_altivec_stvx_16qi;
6043 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6044 icode = CODE_FOR_altivec_stvx_8hi;
6046 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6047 icode = CODE_FOR_altivec_stvx_4si;
6049 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6050 icode = CODE_FOR_altivec_stvx_4sf;
6057 arg0 = TREE_VALUE (arglist);
6058 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6059 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6060 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6061 mode0 = insn_data[icode].operand[0].mode;
6062 mode1 = insn_data[icode].operand[1].mode;
6064 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6065 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6066 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6067 op1 = copy_to_mode_reg (mode1, op1);
6069 pat = GEN_FCN (icode) (op0, op1);
6077 /* Expand the dst builtins. */
6079 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6082 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6083 tree arglist = TREE_OPERAND (exp, 1);
6084 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6085 tree arg0, arg1, arg2;
6086 enum machine_mode mode0, mode1, mode2;
6087 rtx pat, op0, op1, op2;
6088 struct builtin_description *d;
6093 /* Handle DST variants. */
6094 d = (struct builtin_description *) bdesc_dst;
6095 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6096 if (d->code == fcode)
6098 arg0 = TREE_VALUE (arglist);
6099 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6100 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6101 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6102 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6103 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6104 mode0 = insn_data[d->icode].operand[0].mode;
6105 mode1 = insn_data[d->icode].operand[1].mode;
6106 mode2 = insn_data[d->icode].operand[2].mode;
6108 /* Invalid arguments, bail out before generating bad rtl. */
6109 if (arg0 == error_mark_node
6110 || arg1 == error_mark_node
6111 || arg2 == error_mark_node)
6116 if (TREE_CODE (arg2) != INTEGER_CST
6117 || TREE_INT_CST_LOW (arg2) & ~0x3)
6119 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6123 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6124 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6125 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6126 op1 = copy_to_mode_reg (mode1, op1);
6128 pat = GEN_FCN (d->icode) (op0, op1, op2);
6138 /* Expand the builtin in EXP and store the result in TARGET. Store
6139 true in *EXPANDEDP if we found a builtin to expand. */
6141 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6143 struct builtin_description *d;
6144 struct builtin_description_predicates *dp;
6146 enum insn_code icode;
6147 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6148 tree arglist = TREE_OPERAND (exp, 1);
6151 enum machine_mode tmode, mode0;
6152 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6154 target = altivec_expand_ld_builtin (exp, target, expandedp);
6158 target = altivec_expand_st_builtin (exp, target, expandedp);
6162 target = altivec_expand_dst_builtin (exp, target, expandedp);
6170 case ALTIVEC_BUILTIN_STVX:
6171 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6172 case ALTIVEC_BUILTIN_STVEBX:
6173 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6174 case ALTIVEC_BUILTIN_STVEHX:
6175 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6176 case ALTIVEC_BUILTIN_STVEWX:
6177 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6178 case ALTIVEC_BUILTIN_STVXL:
6179 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6181 case ALTIVEC_BUILTIN_MFVSCR:
6182 icode = CODE_FOR_altivec_mfvscr;
6183 tmode = insn_data[icode].operand[0].mode;
6186 || GET_MODE (target) != tmode
6187 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6188 target = gen_reg_rtx (tmode);
6190 pat = GEN_FCN (icode) (target);
6196 case ALTIVEC_BUILTIN_MTVSCR:
6197 icode = CODE_FOR_altivec_mtvscr;
6198 arg0 = TREE_VALUE (arglist);
6199 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6200 mode0 = insn_data[icode].operand[0].mode;
6202 /* If we got invalid arguments bail out before generating bad rtl. */
6203 if (arg0 == error_mark_node)
6206 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6207 op0 = copy_to_mode_reg (mode0, op0);
6209 pat = GEN_FCN (icode) (op0);
6214 case ALTIVEC_BUILTIN_DSSALL:
6215 emit_insn (gen_altivec_dssall ());
6218 case ALTIVEC_BUILTIN_DSS:
6219 icode = CODE_FOR_altivec_dss;
6220 arg0 = TREE_VALUE (arglist);
6222 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6223 mode0 = insn_data[icode].operand[0].mode;
6225 /* If we got invalid arguments bail out before generating bad rtl. */
6226 if (arg0 == error_mark_node)
6229 if (TREE_CODE (arg0) != INTEGER_CST
6230 || TREE_INT_CST_LOW (arg0) & ~0x3)
6232 error ("argument to dss must be a 2-bit unsigned literal");
6236 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6237 op0 = copy_to_mode_reg (mode0, op0);
6239 emit_insn (gen_altivec_dss (op0));
6242 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6243 arg0 = TREE_VALUE (arglist);
6244 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
6245 arg0 = TREE_OPERAND (arg0, 0);
6246 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6247 TREE_STRING_POINTER (arg0));
6252 /* Expand abs* operations. */
6253 d = (struct builtin_description *) bdesc_abs;
6254 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6255 if (d->code == fcode)
6256 return altivec_expand_abs_builtin (d->icode, arglist, target);
6258 /* Expand the AltiVec predicates. */
6259 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6260 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6261 if (dp->code == fcode)
6262 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6264 /* LV* are funky. We initialized them differently. */
6267 case ALTIVEC_BUILTIN_LVSL:
6268 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6270 case ALTIVEC_BUILTIN_LVSR:
6271 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6273 case ALTIVEC_BUILTIN_LVEBX:
6274 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6276 case ALTIVEC_BUILTIN_LVEHX:
6277 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6279 case ALTIVEC_BUILTIN_LVEWX:
6280 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6282 case ALTIVEC_BUILTIN_LVXL:
6283 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6285 case ALTIVEC_BUILTIN_LVX:
6286 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6297 /* Binops that need to be initialized manually, but can be expanded
6298 automagically by rs6000_expand_binop_builtin. */
6299 static struct builtin_description bdesc_2arg_spe[] =
6301 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6302 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6303 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6304 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6305 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6306 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6307 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6308 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6309 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6310 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6311 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6312 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6313 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6314 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6315 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6316 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6317 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6318 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6319 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6320 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6321 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6322 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6325 /* Expand the builtin in EXP and store the result in TARGET. Store
6326 true in *EXPANDEDP if we found a builtin to expand.
6328 This expands the SPE builtins that are not simple unary and binary
6331 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6333 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6334 tree arglist = TREE_OPERAND (exp, 1);
6336 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6337 enum insn_code icode;
6338 enum machine_mode tmode, mode0;
6340 struct builtin_description *d;
6345 /* Syntax check for a 5-bit unsigned immediate. */
6348 case SPE_BUILTIN_EVSTDD:
6349 case SPE_BUILTIN_EVSTDH:
6350 case SPE_BUILTIN_EVSTDW:
6351 case SPE_BUILTIN_EVSTWHE:
6352 case SPE_BUILTIN_EVSTWHO:
6353 case SPE_BUILTIN_EVSTWWE:
6354 case SPE_BUILTIN_EVSTWWO:
6355 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6356 if (TREE_CODE (arg1) != INTEGER_CST
6357 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6359 error ("argument 2 must be a 5-bit unsigned literal");
6367 /* The evsplat*i instructions are not quite generic. */
6370 case SPE_BUILTIN_EVSPLATFI:
6371 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6373 case SPE_BUILTIN_EVSPLATI:
6374 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6380 d = (struct builtin_description *) bdesc_2arg_spe;
6381 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6382 if (d->code == fcode)
6383 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6385 d = (struct builtin_description *) bdesc_spe_predicates;
6386 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6387 if (d->code == fcode)
6388 return spe_expand_predicate_builtin (d->icode, arglist, target);
6390 d = (struct builtin_description *) bdesc_spe_evsel;
6391 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6392 if (d->code == fcode)
6393 return spe_expand_evsel_builtin (d->icode, arglist, target);
6397 case SPE_BUILTIN_EVSTDDX:
6398 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6399 case SPE_BUILTIN_EVSTDHX:
6400 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6401 case SPE_BUILTIN_EVSTDWX:
6402 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6403 case SPE_BUILTIN_EVSTWHEX:
6404 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6405 case SPE_BUILTIN_EVSTWHOX:
6406 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6407 case SPE_BUILTIN_EVSTWWEX:
6408 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6409 case SPE_BUILTIN_EVSTWWOX:
6410 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6411 case SPE_BUILTIN_EVSTDD:
6412 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6413 case SPE_BUILTIN_EVSTDH:
6414 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6415 case SPE_BUILTIN_EVSTDW:
6416 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6417 case SPE_BUILTIN_EVSTWHE:
6418 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6419 case SPE_BUILTIN_EVSTWHO:
6420 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6421 case SPE_BUILTIN_EVSTWWE:
6422 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6423 case SPE_BUILTIN_EVSTWWO:
6424 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6425 case SPE_BUILTIN_MFSPEFSCR:
6426 icode = CODE_FOR_spe_mfspefscr;
6427 tmode = insn_data[icode].operand[0].mode;
6430 || GET_MODE (target) != tmode
6431 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6432 target = gen_reg_rtx (tmode);
6434 pat = GEN_FCN (icode) (target);
6439 case SPE_BUILTIN_MTSPEFSCR:
6440 icode = CODE_FOR_spe_mtspefscr;
6441 arg0 = TREE_VALUE (arglist);
6442 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6443 mode0 = insn_data[icode].operand[0].mode;
6445 if (arg0 == error_mark_node)
6448 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6449 op0 = copy_to_mode_reg (mode0, op0);
6451 pat = GEN_FCN (icode) (op0);
6464 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6466 rtx pat, scratch, tmp;
6467 tree form = TREE_VALUE (arglist);
6468 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6469 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6470 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6471 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6472 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6473 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6477 if (TREE_CODE (form) != INTEGER_CST)
6479 error ("argument 1 of __builtin_spe_predicate must be a constant");
6483 form_int = TREE_INT_CST_LOW (form);
6488 if (arg0 == error_mark_node || arg1 == error_mark_node)
6492 || GET_MODE (target) != SImode
6493 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6494 target = gen_reg_rtx (SImode);
6496 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6497 op0 = copy_to_mode_reg (mode0, op0);
6498 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6499 op1 = copy_to_mode_reg (mode1, op1);
6501 scratch = gen_reg_rtx (CCmode);
6503 pat = GEN_FCN (icode) (scratch, op0, op1);
6508 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6509 _lower_. We use one compare, but look in different bits of the
6510 CR for each variant.
6512 There are 2 elements in each SPE simd type (upper/lower). The CR
6513 bits are set as follows:
6515 BIT0 | BIT 1 | BIT 2 | BIT 3
6516 U | L | (U | L) | (U & L)
6518 So, for an "all" relationship, BIT 3 would be set.
6519 For an "any" relationship, BIT 2 would be set. Etc.
6521 Following traditional nomenclature, these bits map to:
6523 BIT0 | BIT 1 | BIT 2 | BIT 3
6526 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6531 /* All variant. OV bit. */
6533 /* We need to get to the OV bit, which is the ORDERED bit. We
6534 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6535 that's ugly and will trigger a validate_condition_mode abort.
6536 So let's just use another pattern. */
6537 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6539 /* Any variant. EQ bit. */
6543 /* Upper variant. LT bit. */
6547 /* Lower variant. GT bit. */
6552 error ("argument 1 of __builtin_spe_predicate is out of range");
6556 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6557 emit_move_insn (target, tmp);
6562 /* The evsel builtins look like this:
6564 e = __builtin_spe_evsel_OP (a, b, c, d);
6568 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6569 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6573 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6576 tree arg0 = TREE_VALUE (arglist);
6577 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6578 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6579 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6580 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6581 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6582 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6583 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6584 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6585 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6590 if (arg0 == error_mark_node || arg1 == error_mark_node
6591 || arg2 == error_mark_node || arg3 == error_mark_node)
6595 || GET_MODE (target) != mode0
6596 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6597 target = gen_reg_rtx (mode0);
6599 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6600 op0 = copy_to_mode_reg (mode0, op0);
6601 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6602 op1 = copy_to_mode_reg (mode0, op1);
6603 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6604 op2 = copy_to_mode_reg (mode0, op2);
6605 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6606 op3 = copy_to_mode_reg (mode0, op3);
6608 /* Generate the compare. */
6609 scratch = gen_reg_rtx (CCmode);
6610 pat = GEN_FCN (icode) (scratch, op0, op1);
6615 if (mode0 == V2SImode)
6616 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6618 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6623 /* Expand an expression EXP that calls a built-in function,
6624 with result going to TARGET if that's convenient
6625 (and in mode MODE if that's convenient).
6626 SUBTARGET may be used as the target for computing one of EXP's operands.
6627 IGNORE is nonzero if the value is to be ignored. */
6630 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6631 enum machine_mode mode ATTRIBUTE_UNUSED,
6632 int ignore ATTRIBUTE_UNUSED)
6634 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6635 tree arglist = TREE_OPERAND (exp, 1);
6636 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6637 struct builtin_description *d;
6644 ret = altivec_expand_builtin (exp, target, &success);
6651 ret = spe_expand_builtin (exp, target, &success);
6657 if (TARGET_ALTIVEC || TARGET_SPE)
6659 /* Handle simple unary operations. */
6660 d = (struct builtin_description *) bdesc_1arg;
6661 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6662 if (d->code == fcode)
6663 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6665 /* Handle simple binary operations. */
6666 d = (struct builtin_description *) bdesc_2arg;
6667 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6668 if (d->code == fcode)
6669 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6671 /* Handle simple ternary operations. */
6672 d = (struct builtin_description *) bdesc_3arg;
6673 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6674 if (d->code == fcode)
6675 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6683 rs6000_init_builtins (void)
6685 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6686 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6687 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6689 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
6690 types, especially in C++ land. Similarly, 'vector pixel' is distinct from+ 'vector unsigned short'. */
6692 bool_char_type_node = copy_node (unsigned_intQI_type_node);
6693 TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
6694 bool_short_type_node = copy_node (unsigned_intHI_type_node);
6695 TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
6696 bool_int_type_node = copy_node (unsigned_intSI_type_node);
6697 TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
6698 pixel_type_node = copy_node (unsigned_intHI_type_node);
6699 TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;
6701 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6702 get_identifier ("__bool char"),
6703 bool_char_type_node));
6704 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6705 get_identifier ("__bool short"),
6706 bool_short_type_node));
6707 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6708 get_identifier ("__bool int"),
6709 bool_int_type_node));
6710 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6711 get_identifier ("__pixel"),
6714 bool_V16QI_type_node = make_vector (V16QImode, bool_char_type_node, 1);
6715 bool_V8HI_type_node = make_vector (V8HImode, bool_short_type_node, 1);
6716 bool_V4SI_type_node = make_vector (V4SImode, bool_int_type_node, 1);
6717 pixel_V8HI_type_node = make_vector (V8HImode, pixel_type_node, 1);
6719 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6720 get_identifier ("__vector unsigned char"),
6721 unsigned_V16QI_type_node));
6722 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6723 get_identifier ("__vector signed char"),
6725 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6726 get_identifier ("__vector __bool char"),
6727 bool_V16QI_type_node));
6729 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6730 get_identifier ("__vector unsigned short"),
6731 unsigned_V8HI_type_node));
6732 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6733 get_identifier ("__vector signed short"),
6735 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6736 get_identifier ("__vector __bool short"),
6737 bool_V8HI_type_node));
6739 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6740 get_identifier ("__vector unsigned int"),
6741 unsigned_V4SI_type_node));
6742 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6743 get_identifier ("__vector signed int"),
6745 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6746 get_identifier ("__vector __bool int"),
6747 bool_V4SI_type_node));
6749 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6750 get_identifier ("__vector float"),
6752 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6753 get_identifier ("__vector __pixel"),
6754 pixel_V8HI_type_node));
6757 spe_init_builtins ();
6759 altivec_init_builtins ();
6760 if (TARGET_ALTIVEC || TARGET_SPE)
6761 rs6000_common_init_builtins ();
6764 /* Search through a set of builtins and enable the mask bits.
6765 DESC is an array of builtins.
6766 SIZE is the total number of builtins.
6767 START is the builtin enum at which to start.
6768 END is the builtin enum at which to end. */
6770 enable_mask_for_builtins (struct builtin_description *desc, int size,
6771 enum rs6000_builtins start,
6772 enum rs6000_builtins end)
6776 for (i = 0; i < size; ++i)
6777 if (desc[i].code == start)
6783 for (; i < size; ++i)
6785 /* Flip all the bits on. */
6786 desc[i].mask = target_flags;
6787 if (desc[i].code == end)
6793 spe_init_builtins (void)
6795 tree endlink = void_list_node;
6796 tree puint_type_node = build_pointer_type (unsigned_type_node);
6797 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6798 struct builtin_description *d;
6801 tree v2si_ftype_4_v2si
6802 = build_function_type
6803 (opaque_V2SI_type_node,
6804 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6805 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6806 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6807 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6810 tree v2sf_ftype_4_v2sf
6811 = build_function_type
6812 (opaque_V2SF_type_node,
6813 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6814 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6815 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6816 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6819 tree int_ftype_int_v2si_v2si
6820 = build_function_type
6822 tree_cons (NULL_TREE, integer_type_node,
6823 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6824 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6827 tree int_ftype_int_v2sf_v2sf
6828 = build_function_type
6830 tree_cons (NULL_TREE, integer_type_node,
6831 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6832 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6835 tree void_ftype_v2si_puint_int
6836 = build_function_type (void_type_node,
6837 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6838 tree_cons (NULL_TREE, puint_type_node,
6839 tree_cons (NULL_TREE,
6843 tree void_ftype_v2si_puint_char
6844 = build_function_type (void_type_node,
6845 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6846 tree_cons (NULL_TREE, puint_type_node,
6847 tree_cons (NULL_TREE,
6851 tree void_ftype_v2si_pv2si_int
6852 = build_function_type (void_type_node,
6853 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6854 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6855 tree_cons (NULL_TREE,
6859 tree void_ftype_v2si_pv2si_char
6860 = build_function_type (void_type_node,
6861 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6862 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6863 tree_cons (NULL_TREE,
6868 = build_function_type (void_type_node,
6869 tree_cons (NULL_TREE, integer_type_node, endlink));
6872 = build_function_type (integer_type_node, endlink);
6874 tree v2si_ftype_pv2si_int
6875 = build_function_type (opaque_V2SI_type_node,
6876 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6877 tree_cons (NULL_TREE, integer_type_node,
6880 tree v2si_ftype_puint_int
6881 = build_function_type (opaque_V2SI_type_node,
6882 tree_cons (NULL_TREE, puint_type_node,
6883 tree_cons (NULL_TREE, integer_type_node,
6886 tree v2si_ftype_pushort_int
6887 = build_function_type (opaque_V2SI_type_node,
6888 tree_cons (NULL_TREE, pushort_type_node,
6889 tree_cons (NULL_TREE, integer_type_node,
6892 tree v2si_ftype_signed_char
6893 = build_function_type (opaque_V2SI_type_node,
6894 tree_cons (NULL_TREE, signed_char_type_node,
6897 /* The initialization of the simple binary and unary builtins is
6898 done in rs6000_common_init_builtins, but we have to enable the
6899 mask bits here manually because we have run out of `target_flags'
6900 bits. We really need to redesign this mask business. */
6902 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6903 ARRAY_SIZE (bdesc_2arg),
6906 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6907 ARRAY_SIZE (bdesc_1arg),
6909 SPE_BUILTIN_EVSUBFUSIAAW);
6910 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6911 ARRAY_SIZE (bdesc_spe_predicates),
6912 SPE_BUILTIN_EVCMPEQ,
6913 SPE_BUILTIN_EVFSTSTLT);
6914 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6915 ARRAY_SIZE (bdesc_spe_evsel),
6916 SPE_BUILTIN_EVSEL_CMPGTS,
6917 SPE_BUILTIN_EVSEL_FSTSTEQ);
6919 (*lang_hooks.decls.pushdecl)
6920 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6921 opaque_V2SI_type_node));
6923 /* Initialize irregular SPE builtins. */
6925 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6926 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6927 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6928 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6929 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6930 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6931 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6932 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6933 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6934 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6935 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6936 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6937 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6938 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6939 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6940 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6941 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6942 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6945 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6946 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6947 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6948 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6949 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6950 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6951 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6952 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6953 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6954 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6955 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6956 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6957 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6958 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6959 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6960 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6961 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6962 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6963 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6964 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6965 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6966 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6969 d = (struct builtin_description *) bdesc_spe_predicates;
6970 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6974 switch (insn_data[d->icode].operand[1].mode)
6977 type = int_ftype_int_v2si_v2si;
6980 type = int_ftype_int_v2sf_v2sf;
6986 def_builtin (d->mask, d->name, type, d->code);
6989 /* Evsel predicates. */
6990 d = (struct builtin_description *) bdesc_spe_evsel;
6991 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6995 switch (insn_data[d->icode].operand[1].mode)
6998 type = v2si_ftype_4_v2si;
7001 type = v2sf_ftype_4_v2sf;
7007 def_builtin (d->mask, d->name, type, d->code);
7012 altivec_init_builtins (void)
7014 struct builtin_description *d;
7015 struct builtin_description_predicates *dp;
7017 tree pfloat_type_node = build_pointer_type (float_type_node);
7018 tree pint_type_node = build_pointer_type (integer_type_node);
7019 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7020 tree pchar_type_node = build_pointer_type (char_type_node);
7022 tree pvoid_type_node = build_pointer_type (void_type_node);
7024 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7025 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7026 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7027 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7029 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7031 tree int_ftype_int_v4si_v4si
7032 = build_function_type_list (integer_type_node,
7033 integer_type_node, V4SI_type_node,
7034 V4SI_type_node, NULL_TREE);
7035 tree v4sf_ftype_pcfloat
7036 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7037 tree void_ftype_pfloat_v4sf
7038 = build_function_type_list (void_type_node,
7039 pfloat_type_node, V4SF_type_node, NULL_TREE);
7040 tree v4si_ftype_pcint
7041 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7042 tree void_ftype_pint_v4si
7043 = build_function_type_list (void_type_node,
7044 pint_type_node, V4SI_type_node, NULL_TREE);
7045 tree v8hi_ftype_pcshort
7046 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7047 tree void_ftype_pshort_v8hi
7048 = build_function_type_list (void_type_node,
7049 pshort_type_node, V8HI_type_node, NULL_TREE);
7050 tree v16qi_ftype_pcchar
7051 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7052 tree void_ftype_pchar_v16qi
7053 = build_function_type_list (void_type_node,
7054 pchar_type_node, V16QI_type_node, NULL_TREE);
7055 tree void_ftype_v4si
7056 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7057 tree v8hi_ftype_void
7058 = build_function_type (V8HI_type_node, void_list_node);
7059 tree void_ftype_void
7060 = build_function_type (void_type_node, void_list_node);
7062 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
7064 tree v16qi_ftype_long_pcvoid
7065 = build_function_type_list (V16QI_type_node,
7066 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7067 tree v8hi_ftype_long_pcvoid
7068 = build_function_type_list (V8HI_type_node,
7069 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7070 tree v4si_ftype_long_pcvoid
7071 = build_function_type_list (V4SI_type_node,
7072 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7074 tree void_ftype_v4si_long_pvoid
7075 = build_function_type_list (void_type_node,
7076 V4SI_type_node, long_integer_type_node,
7077 pvoid_type_node, NULL_TREE);
7078 tree void_ftype_v16qi_long_pvoid
7079 = build_function_type_list (void_type_node,
7080 V16QI_type_node, long_integer_type_node,
7081 pvoid_type_node, NULL_TREE);
7082 tree void_ftype_v8hi_long_pvoid
7083 = build_function_type_list (void_type_node,
7084 V8HI_type_node, long_integer_type_node,
7085 pvoid_type_node, NULL_TREE);
7086 tree int_ftype_int_v8hi_v8hi
7087 = build_function_type_list (integer_type_node,
7088 integer_type_node, V8HI_type_node,
7089 V8HI_type_node, NULL_TREE);
7090 tree int_ftype_int_v16qi_v16qi
7091 = build_function_type_list (integer_type_node,
7092 integer_type_node, V16QI_type_node,
7093 V16QI_type_node, NULL_TREE);
7094 tree int_ftype_int_v4sf_v4sf
7095 = build_function_type_list (integer_type_node,
7096 integer_type_node, V4SF_type_node,
7097 V4SF_type_node, NULL_TREE);
7098 tree v4si_ftype_v4si
7099 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7100 tree v8hi_ftype_v8hi
7101 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7102 tree v16qi_ftype_v16qi
7103 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7104 tree v4sf_ftype_v4sf
7105 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7106 tree void_ftype_pcvoid_int_int
7107 = build_function_type_list (void_type_node,
7108 pcvoid_type_node, integer_type_node,
7109 integer_type_node, NULL_TREE);
7110 tree int_ftype_pcchar
7111 = build_function_type_list (integer_type_node,
7112 pcchar_type_node, NULL_TREE);
7114 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7115 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7116 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7117 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7118 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7119 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7120 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7121 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7122 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7123 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7124 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7125 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7126 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7127 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7128 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7129 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7130 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7131 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7132 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7133 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
7134 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7135 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7136 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7137 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7138 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7139 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7140 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7141 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7142 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7143 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7144 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7145 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7147 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7148 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7149 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7151 /* Add the DST variants. */
7152 d = (struct builtin_description *) bdesc_dst;
7153 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7154 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7156 /* Initialize the predicates. */
7157 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7158 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7160 enum machine_mode mode1;
7163 mode1 = insn_data[dp->icode].operand[1].mode;
7168 type = int_ftype_int_v4si_v4si;
7171 type = int_ftype_int_v8hi_v8hi;
7174 type = int_ftype_int_v16qi_v16qi;
7177 type = int_ftype_int_v4sf_v4sf;
7183 def_builtin (dp->mask, dp->name, type, dp->code);
7186 /* Initialize the abs* operators. */
7187 d = (struct builtin_description *) bdesc_abs;
7188 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7190 enum machine_mode mode0;
7193 mode0 = insn_data[d->icode].operand[0].mode;
7198 type = v4si_ftype_v4si;
7201 type = v8hi_ftype_v8hi;
7204 type = v16qi_ftype_v16qi;
7207 type = v4sf_ftype_v4sf;
7213 def_builtin (d->mask, d->name, type, d->code);
7218 rs6000_common_init_builtins (void)
7220 struct builtin_description *d;
7223 tree v4sf_ftype_v4sf_v4sf_v16qi
7224 = build_function_type_list (V4SF_type_node,
7225 V4SF_type_node, V4SF_type_node,
7226 V16QI_type_node, NULL_TREE);
7227 tree v4si_ftype_v4si_v4si_v16qi
7228 = build_function_type_list (V4SI_type_node,
7229 V4SI_type_node, V4SI_type_node,
7230 V16QI_type_node, NULL_TREE);
7231 tree v8hi_ftype_v8hi_v8hi_v16qi
7232 = build_function_type_list (V8HI_type_node,
7233 V8HI_type_node, V8HI_type_node,
7234 V16QI_type_node, NULL_TREE);
7235 tree v16qi_ftype_v16qi_v16qi_v16qi
7236 = build_function_type_list (V16QI_type_node,
7237 V16QI_type_node, V16QI_type_node,
7238 V16QI_type_node, NULL_TREE);
7240 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7242 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7243 tree v16qi_ftype_int
7244 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7245 tree v8hi_ftype_v16qi
7246 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7247 tree v4sf_ftype_v4sf
7248 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7250 tree v2si_ftype_v2si_v2si
7251 = build_function_type_list (opaque_V2SI_type_node,
7252 opaque_V2SI_type_node,
7253 opaque_V2SI_type_node, NULL_TREE);
7255 tree v2sf_ftype_v2sf_v2sf
7256 = build_function_type_list (opaque_V2SF_type_node,
7257 opaque_V2SF_type_node,
7258 opaque_V2SF_type_node, NULL_TREE);
7260 tree v2si_ftype_int_int
7261 = build_function_type_list (opaque_V2SI_type_node,
7262 integer_type_node, integer_type_node,
7265 tree v2si_ftype_v2si
7266 = build_function_type_list (opaque_V2SI_type_node,
7267 opaque_V2SI_type_node, NULL_TREE);
7269 tree v2sf_ftype_v2sf
7270 = build_function_type_list (opaque_V2SF_type_node,
7271 opaque_V2SF_type_node, NULL_TREE);
7273 tree v2sf_ftype_v2si
7274 = build_function_type_list (opaque_V2SF_type_node,
7275 opaque_V2SI_type_node, NULL_TREE);
7277 tree v2si_ftype_v2sf
7278 = build_function_type_list (opaque_V2SI_type_node,
7279 opaque_V2SF_type_node, NULL_TREE);
7281 tree v2si_ftype_v2si_char
7282 = build_function_type_list (opaque_V2SI_type_node,
7283 opaque_V2SI_type_node,
7284 char_type_node, NULL_TREE);
7286 tree v2si_ftype_int_char
7287 = build_function_type_list (opaque_V2SI_type_node,
7288 integer_type_node, char_type_node, NULL_TREE);
7290 tree v2si_ftype_char
7291 = build_function_type_list (opaque_V2SI_type_node,
7292 char_type_node, NULL_TREE);
7294 tree int_ftype_int_int
7295 = build_function_type_list (integer_type_node,
7296 integer_type_node, integer_type_node,
7299 tree v4si_ftype_v4si_v4si
7300 = build_function_type_list (V4SI_type_node,
7301 V4SI_type_node, V4SI_type_node, NULL_TREE);
7302 tree v4sf_ftype_v4si_int
7303 = build_function_type_list (V4SF_type_node,
7304 V4SI_type_node, integer_type_node, NULL_TREE);
7305 tree v4si_ftype_v4sf_int
7306 = build_function_type_list (V4SI_type_node,
7307 V4SF_type_node, integer_type_node, NULL_TREE);
7308 tree v4si_ftype_v4si_int
7309 = build_function_type_list (V4SI_type_node,
7310 V4SI_type_node, integer_type_node, NULL_TREE);
7311 tree v8hi_ftype_v8hi_int
7312 = build_function_type_list (V8HI_type_node,
7313 V8HI_type_node, integer_type_node, NULL_TREE);
7314 tree v16qi_ftype_v16qi_int
7315 = build_function_type_list (V16QI_type_node,
7316 V16QI_type_node, integer_type_node, NULL_TREE);
7317 tree v16qi_ftype_v16qi_v16qi_int
7318 = build_function_type_list (V16QI_type_node,
7319 V16QI_type_node, V16QI_type_node,
7320 integer_type_node, NULL_TREE);
7321 tree v8hi_ftype_v8hi_v8hi_int
7322 = build_function_type_list (V8HI_type_node,
7323 V8HI_type_node, V8HI_type_node,
7324 integer_type_node, NULL_TREE);
7325 tree v4si_ftype_v4si_v4si_int
7326 = build_function_type_list (V4SI_type_node,
7327 V4SI_type_node, V4SI_type_node,
7328 integer_type_node, NULL_TREE);
7329 tree v4sf_ftype_v4sf_v4sf_int
7330 = build_function_type_list (V4SF_type_node,
7331 V4SF_type_node, V4SF_type_node,
7332 integer_type_node, NULL_TREE);
7333 tree v4sf_ftype_v4sf_v4sf
7334 = build_function_type_list (V4SF_type_node,
7335 V4SF_type_node, V4SF_type_node, NULL_TREE);
7336 tree v4sf_ftype_v4sf_v4sf_v4si
7337 = build_function_type_list (V4SF_type_node,
7338 V4SF_type_node, V4SF_type_node,
7339 V4SI_type_node, NULL_TREE);
7340 tree v4sf_ftype_v4sf_v4sf_v4sf
7341 = build_function_type_list (V4SF_type_node,
7342 V4SF_type_node, V4SF_type_node,
7343 V4SF_type_node, NULL_TREE);
7344 tree v4si_ftype_v4si_v4si_v4si
7345 = build_function_type_list (V4SI_type_node,
7346 V4SI_type_node, V4SI_type_node,
7347 V4SI_type_node, NULL_TREE);
7348 tree v8hi_ftype_v8hi_v8hi
7349 = build_function_type_list (V8HI_type_node,
7350 V8HI_type_node, V8HI_type_node, NULL_TREE);
7351 tree v8hi_ftype_v8hi_v8hi_v8hi
7352 = build_function_type_list (V8HI_type_node,
7353 V8HI_type_node, V8HI_type_node,
7354 V8HI_type_node, NULL_TREE);
7355 tree v4si_ftype_v8hi_v8hi_v4si
7356 = build_function_type_list (V4SI_type_node,
7357 V8HI_type_node, V8HI_type_node,
7358 V4SI_type_node, NULL_TREE);
7359 tree v4si_ftype_v16qi_v16qi_v4si
7360 = build_function_type_list (V4SI_type_node,
7361 V16QI_type_node, V16QI_type_node,
7362 V4SI_type_node, NULL_TREE);
7363 tree v16qi_ftype_v16qi_v16qi
7364 = build_function_type_list (V16QI_type_node,
7365 V16QI_type_node, V16QI_type_node, NULL_TREE);
7366 tree v4si_ftype_v4sf_v4sf
7367 = build_function_type_list (V4SI_type_node,
7368 V4SF_type_node, V4SF_type_node, NULL_TREE);
7369 tree v8hi_ftype_v16qi_v16qi
7370 = build_function_type_list (V8HI_type_node,
7371 V16QI_type_node, V16QI_type_node, NULL_TREE);
7372 tree v4si_ftype_v8hi_v8hi
7373 = build_function_type_list (V4SI_type_node,
7374 V8HI_type_node, V8HI_type_node, NULL_TREE);
7375 tree v8hi_ftype_v4si_v4si
7376 = build_function_type_list (V8HI_type_node,
7377 V4SI_type_node, V4SI_type_node, NULL_TREE);
7378 tree v16qi_ftype_v8hi_v8hi
7379 = build_function_type_list (V16QI_type_node,
7380 V8HI_type_node, V8HI_type_node, NULL_TREE);
7381 tree v4si_ftype_v16qi_v4si
7382 = build_function_type_list (V4SI_type_node,
7383 V16QI_type_node, V4SI_type_node, NULL_TREE);
7384 tree v4si_ftype_v16qi_v16qi
7385 = build_function_type_list (V4SI_type_node,
7386 V16QI_type_node, V16QI_type_node, NULL_TREE);
7387 tree v4si_ftype_v8hi_v4si
7388 = build_function_type_list (V4SI_type_node,
7389 V8HI_type_node, V4SI_type_node, NULL_TREE);
7390 tree v4si_ftype_v8hi
7391 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7392 tree int_ftype_v4si_v4si
7393 = build_function_type_list (integer_type_node,
7394 V4SI_type_node, V4SI_type_node, NULL_TREE);
7395 tree int_ftype_v4sf_v4sf
7396 = build_function_type_list (integer_type_node,
7397 V4SF_type_node, V4SF_type_node, NULL_TREE);
7398 tree int_ftype_v16qi_v16qi
7399 = build_function_type_list (integer_type_node,
7400 V16QI_type_node, V16QI_type_node, NULL_TREE);
7401 tree int_ftype_v8hi_v8hi
7402 = build_function_type_list (integer_type_node,
7403 V8HI_type_node, V8HI_type_node, NULL_TREE);
7405 /* Add the simple ternary operators. */
7406 d = (struct builtin_description *) bdesc_3arg;
7407 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7410 enum machine_mode mode0, mode1, mode2, mode3;
7413 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7416 mode0 = insn_data[d->icode].operand[0].mode;
7417 mode1 = insn_data[d->icode].operand[1].mode;
7418 mode2 = insn_data[d->icode].operand[2].mode;
7419 mode3 = insn_data[d->icode].operand[3].mode;
7421 /* When all four are of the same mode. */
7422 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7427 type = v4si_ftype_v4si_v4si_v4si;
7430 type = v4sf_ftype_v4sf_v4sf_v4sf;
7433 type = v8hi_ftype_v8hi_v8hi_v8hi;
7436 type = v16qi_ftype_v16qi_v16qi_v16qi;
7442 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7447 type = v4si_ftype_v4si_v4si_v16qi;
7450 type = v4sf_ftype_v4sf_v4sf_v16qi;
7453 type = v8hi_ftype_v8hi_v8hi_v16qi;
7456 type = v16qi_ftype_v16qi_v16qi_v16qi;
7462 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7463 && mode3 == V4SImode)
7464 type = v4si_ftype_v16qi_v16qi_v4si;
7465 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7466 && mode3 == V4SImode)
7467 type = v4si_ftype_v8hi_v8hi_v4si;
7468 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7469 && mode3 == V4SImode)
7470 type = v4sf_ftype_v4sf_v4sf_v4si;
7472 /* vchar, vchar, vchar, 4 bit literal. */
7473 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7475 type = v16qi_ftype_v16qi_v16qi_int;
7477 /* vshort, vshort, vshort, 4 bit literal. */
7478 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7480 type = v8hi_ftype_v8hi_v8hi_int;
7482 /* vint, vint, vint, 4 bit literal. */
7483 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7485 type = v4si_ftype_v4si_v4si_int;
7487 /* vfloat, vfloat, vfloat, 4 bit literal. */
7488 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7490 type = v4sf_ftype_v4sf_v4sf_int;
7495 def_builtin (d->mask, d->name, type, d->code);
7498 /* Add the simple binary operators. */
7499 d = (struct builtin_description *) bdesc_2arg;
7500 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7502 enum machine_mode mode0, mode1, mode2;
7505 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7508 mode0 = insn_data[d->icode].operand[0].mode;
7509 mode1 = insn_data[d->icode].operand[1].mode;
7510 mode2 = insn_data[d->icode].operand[2].mode;
7512 /* When all three operands are of the same mode. */
7513 if (mode0 == mode1 && mode1 == mode2)
7518 type = v4sf_ftype_v4sf_v4sf;
7521 type = v4si_ftype_v4si_v4si;
7524 type = v16qi_ftype_v16qi_v16qi;
7527 type = v8hi_ftype_v8hi_v8hi;
7530 type = v2si_ftype_v2si_v2si;
7533 type = v2sf_ftype_v2sf_v2sf;
7536 type = int_ftype_int_int;
7543 /* A few other combos we really don't want to do manually. */
7545 /* vint, vfloat, vfloat. */
7546 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7547 type = v4si_ftype_v4sf_v4sf;
7549 /* vshort, vchar, vchar. */
7550 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7551 type = v8hi_ftype_v16qi_v16qi;
7553 /* vint, vshort, vshort. */
7554 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7555 type = v4si_ftype_v8hi_v8hi;
7557 /* vshort, vint, vint. */
7558 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7559 type = v8hi_ftype_v4si_v4si;
7561 /* vchar, vshort, vshort. */
7562 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7563 type = v16qi_ftype_v8hi_v8hi;
7565 /* vint, vchar, vint. */
7566 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7567 type = v4si_ftype_v16qi_v4si;
7569 /* vint, vchar, vchar. */
7570 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7571 type = v4si_ftype_v16qi_v16qi;
7573 /* vint, vshort, vint. */
7574 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7575 type = v4si_ftype_v8hi_v4si;
7577 /* vint, vint, 5 bit literal. */
7578 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7579 type = v4si_ftype_v4si_int;
7581 /* vshort, vshort, 5 bit literal. */
7582 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7583 type = v8hi_ftype_v8hi_int;
7585 /* vchar, vchar, 5 bit literal. */
7586 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7587 type = v16qi_ftype_v16qi_int;
7589 /* vfloat, vint, 5 bit literal. */
7590 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7591 type = v4sf_ftype_v4si_int;
7593 /* vint, vfloat, 5 bit literal. */
7594 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7595 type = v4si_ftype_v4sf_int;
7597 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7598 type = v2si_ftype_int_int;
7600 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7601 type = v2si_ftype_v2si_char;
7603 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7604 type = v2si_ftype_int_char;
7607 else if (mode0 == SImode)
7612 type = int_ftype_v4si_v4si;
7615 type = int_ftype_v4sf_v4sf;
7618 type = int_ftype_v16qi_v16qi;
7621 type = int_ftype_v8hi_v8hi;
7631 def_builtin (d->mask, d->name, type, d->code);
7634 /* Add the simple unary operators. */
7635 d = (struct builtin_description *) bdesc_1arg;
7636 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7638 enum machine_mode mode0, mode1;
7641 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7644 mode0 = insn_data[d->icode].operand[0].mode;
7645 mode1 = insn_data[d->icode].operand[1].mode;
7647 if (mode0 == V4SImode && mode1 == QImode)
7648 type = v4si_ftype_int;
7649 else if (mode0 == V8HImode && mode1 == QImode)
7650 type = v8hi_ftype_int;
7651 else if (mode0 == V16QImode && mode1 == QImode)
7652 type = v16qi_ftype_int;
7653 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7654 type = v4sf_ftype_v4sf;
7655 else if (mode0 == V8HImode && mode1 == V16QImode)
7656 type = v8hi_ftype_v16qi;
7657 else if (mode0 == V4SImode && mode1 == V8HImode)
7658 type = v4si_ftype_v8hi;
7659 else if (mode0 == V2SImode && mode1 == V2SImode)
7660 type = v2si_ftype_v2si;
7661 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7662 type = v2sf_ftype_v2sf;
7663 else if (mode0 == V2SFmode && mode1 == V2SImode)
7664 type = v2sf_ftype_v2si;
7665 else if (mode0 == V2SImode && mode1 == V2SFmode)
7666 type = v2si_ftype_v2sf;
7667 else if (mode0 == V2SImode && mode1 == QImode)
7668 type = v2si_ftype_char;
7672 def_builtin (d->mask, d->name, type, d->code);
7677 rs6000_init_libfuncs (void)
7679 if (!TARGET_HARD_FLOAT)
7682 if (DEFAULT_ABI != ABI_V4)
7684 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7686 /* AIX library routines for float->int conversion. */
7687 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7688 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7689 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7690 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7693 /* AIX/Darwin/64-bit Linux quad floating point routines. */
7694 if (!TARGET_XL_COMPAT)
7696 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
7697 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
7698 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
7699 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
7703 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7704 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7705 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7706 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7711 /* 32-bit SVR4 quad floating point routines. */
7713 set_optab_libfunc (add_optab, TFmode, "_q_add");
7714 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7715 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7716 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7717 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7718 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7719 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7721 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7722 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7723 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7724 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7725 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7726 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7728 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7729 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7730 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7731 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7732 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7733 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7734 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7738 /* Expand a block move operation, and return 1 if successful. Return 0
7739 if we should let the compiler generate normal code.
7741 operands[0] is the destination
7742 operands[1] is the source
7743 operands[2] is the length
7744 operands[3] is the alignment */
7746 #define MAX_MOVE_REG 4
7749 expand_block_move (rtx operands[])
7751 rtx orig_dest = operands[0];
7752 rtx orig_src = operands[1];
7753 rtx bytes_rtx = operands[2];
7754 rtx align_rtx = operands[3];
7755 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7760 rtx stores[MAX_MOVE_REG];
7763 /* If this is not a fixed size move, just call memcpy */
7767 /* If this is not a fixed size alignment, abort */
7768 if (GET_CODE (align_rtx) != CONST_INT)
7770 align = INTVAL (align_rtx);
7772 /* Anything to move? */
7773 bytes = INTVAL (bytes_rtx);
7777 /* store_one_arg depends on expand_block_move to handle at least the size of
7778 reg_parm_stack_space. */
7779 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7782 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7785 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7786 rtx (*mov) (rtx, rtx);
7788 enum machine_mode mode = BLKmode;
7792 && bytes > 24 /* move up to 32 bytes at a time */
7800 && ! fixed_regs[12])
7802 move_bytes = (bytes > 32) ? 32 : bytes;
7803 gen_func.movstrsi = gen_movstrsi_8reg;
7805 else if (TARGET_STRING
7806 && bytes > 16 /* move up to 24 bytes at a time */
7812 && ! fixed_regs[10])
7814 move_bytes = (bytes > 24) ? 24 : bytes;
7815 gen_func.movstrsi = gen_movstrsi_6reg;
7817 else if (TARGET_STRING
7818 && bytes > 8 /* move up to 16 bytes at a time */
7824 move_bytes = (bytes > 16) ? 16 : bytes;
7825 gen_func.movstrsi = gen_movstrsi_4reg;
7827 else if (bytes >= 8 && TARGET_POWERPC64
7828 /* 64-bit loads and stores require word-aligned
7830 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7834 gen_func.mov = gen_movdi;
7836 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7837 { /* move up to 8 bytes at a time */
7838 move_bytes = (bytes > 8) ? 8 : bytes;
7839 gen_func.movstrsi = gen_movstrsi_2reg;
7841 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7842 { /* move 4 bytes */
7845 gen_func.mov = gen_movsi;
7847 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7848 { /* move 2 bytes */
7851 gen_func.mov = gen_movhi;
7853 else if (TARGET_STRING && bytes > 1)
7854 { /* move up to 4 bytes at a time */
7855 move_bytes = (bytes > 4) ? 4 : bytes;
7856 gen_func.movstrsi = gen_movstrsi_1reg;
7858 else /* move 1 byte at a time */
7862 gen_func.mov = gen_movqi;
7865 src = adjust_address (orig_src, mode, offset);
7866 dest = adjust_address (orig_dest, mode, offset);
7868 if (mode != BLKmode)
7870 rtx tmp_reg = gen_reg_rtx (mode);
7872 emit_insn ((*gen_func.mov) (tmp_reg, src));
7873 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7876 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7879 for (i = 0; i < num_reg; i++)
7880 emit_insn (stores[i]);
7884 if (mode == BLKmode)
7886 /* Move the address into scratch registers. The movstrsi
7887 patterns require zero offset. */
7888 if (!REG_P (XEXP (src, 0)))
7890 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7891 src = replace_equiv_address (src, src_reg);
7893 set_mem_size (src, GEN_INT (move_bytes));
7895 if (!REG_P (XEXP (dest, 0)))
7897 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7898 dest = replace_equiv_address (dest, dest_reg);
7900 set_mem_size (dest, GEN_INT (move_bytes));
7902 emit_insn ((*gen_func.movstrsi) (dest, src,
7903 GEN_INT (move_bytes & 31),
7912 /* Return 1 if OP is a load multiple operation. It is known to be a
7913 PARALLEL and the first section will be tested. */
7916 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7918 int count = XVECLEN (op, 0);
7919 unsigned int dest_regno;
7923 /* Perform a quick check so we don't blow up below. */
7925 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7926 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7927 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7930 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7931 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7933 for (i = 1; i < count; i++)
7935 rtx elt = XVECEXP (op, 0, i);
7937 if (GET_CODE (elt) != SET
7938 || GET_CODE (SET_DEST (elt)) != REG
7939 || GET_MODE (SET_DEST (elt)) != SImode
7940 || REGNO (SET_DEST (elt)) != dest_regno + i
7941 || GET_CODE (SET_SRC (elt)) != MEM
7942 || GET_MODE (SET_SRC (elt)) != SImode
7943 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7944 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7945 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7946 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7953 /* Similar, but tests for store multiple. Here, the second vector element
7954 is a CLOBBER. It will be tested later. */
7957 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7959 int count = XVECLEN (op, 0) - 1;
7960 unsigned int src_regno;
7964 /* Perform a quick check so we don't blow up below. */
7966 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7967 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7968 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7971 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7972 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7974 for (i = 1; i < count; i++)
7976 rtx elt = XVECEXP (op, 0, i + 1);
7978 if (GET_CODE (elt) != SET
7979 || GET_CODE (SET_SRC (elt)) != REG
7980 || GET_MODE (SET_SRC (elt)) != SImode
7981 || REGNO (SET_SRC (elt)) != src_regno + i
7982 || GET_CODE (SET_DEST (elt)) != MEM
7983 || GET_MODE (SET_DEST (elt)) != SImode
7984 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7985 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7986 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7987 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7994 /* Return a string to perform a load_multiple operation.
7995 operands[0] is the vector.
7996 operands[1] is the source address.
7997 operands[2] is the first destination register. */
8000 rs6000_output_load_multiple (rtx operands[3])
8002 /* We have to handle the case where the pseudo used to contain the address
8003 is assigned to one of the output registers. */
8005 int words = XVECLEN (operands[0], 0);
8008 if (XVECLEN (operands[0], 0) == 1)
8009 return "{l|lwz} %2,0(%1)";
8011 for (i = 0; i < words; i++)
8012 if (refers_to_regno_p (REGNO (operands[2]) + i,
8013 REGNO (operands[2]) + i + 1, operands[1], 0))
8017 xop[0] = GEN_INT (4 * (words-1));
8018 xop[1] = operands[1];
8019 xop[2] = operands[2];
8020 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8025 xop[0] = GEN_INT (4 * (words-1));
8026 xop[1] = operands[1];
8027 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8028 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8033 for (j = 0; j < words; j++)
8036 xop[0] = GEN_INT (j * 4);
8037 xop[1] = operands[1];
8038 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8039 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8041 xop[0] = GEN_INT (i * 4);
8042 xop[1] = operands[1];
8043 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8048 return "{lsi|lswi} %2,%1,%N0";
8051 /* Return 1 for a parallel vrsave operation. */
8054 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8056 int count = XVECLEN (op, 0);
8057 unsigned int dest_regno, src_regno;
8061 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8062 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8063 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8066 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8067 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8069 if (dest_regno != VRSAVE_REGNO
8070 && src_regno != VRSAVE_REGNO)
8073 for (i = 1; i < count; i++)
8075 rtx elt = XVECEXP (op, 0, i);
8077 if (GET_CODE (elt) != CLOBBER
8078 && GET_CODE (elt) != SET)
8085 /* Return 1 for an PARALLEL suitable for mfcr. */
8088 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8090 int count = XVECLEN (op, 0);
8093 /* Perform a quick check so we don't blow up below. */
8095 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8096 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8097 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8100 for (i = 0; i < count; i++)
8102 rtx exp = XVECEXP (op, 0, i);
8107 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8109 if (GET_CODE (src_reg) != REG
8110 || GET_MODE (src_reg) != CCmode
8111 || ! CR_REGNO_P (REGNO (src_reg)))
8114 if (GET_CODE (exp) != SET
8115 || GET_CODE (SET_DEST (exp)) != REG
8116 || GET_MODE (SET_DEST (exp)) != SImode
8117 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8119 unspec = SET_SRC (exp);
8120 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8122 if (GET_CODE (unspec) != UNSPEC
8123 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8124 || XVECLEN (unspec, 0) != 2
8125 || XVECEXP (unspec, 0, 0) != src_reg
8126 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8127 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8133 /* Return 1 for an PARALLEL suitable for mtcrf. */
8136 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8138 int count = XVECLEN (op, 0);
8142 /* Perform a quick check so we don't blow up below. */
8144 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8145 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8146 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8148 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8150 if (GET_CODE (src_reg) != REG
8151 || GET_MODE (src_reg) != SImode
8152 || ! INT_REGNO_P (REGNO (src_reg)))
8155 for (i = 0; i < count; i++)
8157 rtx exp = XVECEXP (op, 0, i);
8161 if (GET_CODE (exp) != SET
8162 || GET_CODE (SET_DEST (exp)) != REG
8163 || GET_MODE (SET_DEST (exp)) != CCmode
8164 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8166 unspec = SET_SRC (exp);
8167 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8169 if (GET_CODE (unspec) != UNSPEC
8170 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8171 || XVECLEN (unspec, 0) != 2
8172 || XVECEXP (unspec, 0, 0) != src_reg
8173 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8174 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8180 /* Return 1 for an PARALLEL suitable for lmw. */
8183 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8185 int count = XVECLEN (op, 0);
8186 unsigned int dest_regno;
8188 unsigned int base_regno;
8189 HOST_WIDE_INT offset;
8192 /* Perform a quick check so we don't blow up below. */
8194 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8195 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8196 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8199 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8200 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8203 || count != 32 - (int) dest_regno)
8206 if (legitimate_indirect_address_p (src_addr, 0))
8209 base_regno = REGNO (src_addr);
8210 if (base_regno == 0)
8213 else if (legitimate_offset_address_p (SImode, src_addr, 0))
8215 offset = INTVAL (XEXP (src_addr, 1));
8216 base_regno = REGNO (XEXP (src_addr, 0));
8221 for (i = 0; i < count; i++)
8223 rtx elt = XVECEXP (op, 0, i);
8226 HOST_WIDE_INT newoffset;
8228 if (GET_CODE (elt) != SET
8229 || GET_CODE (SET_DEST (elt)) != REG
8230 || GET_MODE (SET_DEST (elt)) != SImode
8231 || REGNO (SET_DEST (elt)) != dest_regno + i
8232 || GET_CODE (SET_SRC (elt)) != MEM
8233 || GET_MODE (SET_SRC (elt)) != SImode)
8235 newaddr = XEXP (SET_SRC (elt), 0);
8236 if (legitimate_indirect_address_p (newaddr, 0))
8241 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8243 addr_reg = XEXP (newaddr, 0);
8244 newoffset = INTVAL (XEXP (newaddr, 1));
8248 if (REGNO (addr_reg) != base_regno
8249 || newoffset != offset + 4 * i)
8256 /* Return 1 for an PARALLEL suitable for stmw. */
8259 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8261 int count = XVECLEN (op, 0);
8262 unsigned int src_regno;
8264 unsigned int base_regno;
8265 HOST_WIDE_INT offset;
8268 /* Perform a quick check so we don't blow up below. */
8270 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8271 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8272 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8275 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8276 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8279 || count != 32 - (int) src_regno)
8282 if (legitimate_indirect_address_p (dest_addr, 0))
8285 base_regno = REGNO (dest_addr);
8286 if (base_regno == 0)
8289 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
8291 offset = INTVAL (XEXP (dest_addr, 1));
8292 base_regno = REGNO (XEXP (dest_addr, 0));
8297 for (i = 0; i < count; i++)
8299 rtx elt = XVECEXP (op, 0, i);
8302 HOST_WIDE_INT newoffset;
8304 if (GET_CODE (elt) != SET
8305 || GET_CODE (SET_SRC (elt)) != REG
8306 || GET_MODE (SET_SRC (elt)) != SImode
8307 || REGNO (SET_SRC (elt)) != src_regno + i
8308 || GET_CODE (SET_DEST (elt)) != MEM
8309 || GET_MODE (SET_DEST (elt)) != SImode)
8311 newaddr = XEXP (SET_DEST (elt), 0);
8312 if (legitimate_indirect_address_p (newaddr, 0))
8317 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8319 addr_reg = XEXP (newaddr, 0);
8320 newoffset = INTVAL (XEXP (newaddr, 1));
8324 if (REGNO (addr_reg) != base_regno
8325 || newoffset != offset + 4 * i)
8332 /* A validation routine: say whether CODE, a condition code, and MODE
8333 match. The other alternatives either don't make sense or should
8334 never be generated. */
8337 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8339 if (GET_RTX_CLASS (code) != '<'
8340 || GET_MODE_CLASS (mode) != MODE_CC)
8343 /* These don't make sense. */
8344 if ((code == GT || code == LT || code == GE || code == LE)
8345 && mode == CCUNSmode)
8348 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8349 && mode != CCUNSmode)
8352 if (mode != CCFPmode
8353 && (code == ORDERED || code == UNORDERED
8354 || code == UNEQ || code == LTGT
8355 || code == UNGT || code == UNLT
8356 || code == UNGE || code == UNLE))
8359 /* These should never be generated except for
8360 flag_finite_math_only. */
8361 if (mode == CCFPmode
8362 && ! flag_finite_math_only
8363 && (code == LE || code == GE
8364 || code == UNEQ || code == LTGT
8365 || code == UNGT || code == UNLT))
8368 /* These are invalid; the information is not there. */
8369 if (mode == CCEQmode
8370 && code != EQ && code != NE)
8374 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8375 We only check the opcode against the mode of the CC value here. */
8378 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8380 enum rtx_code code = GET_CODE (op);
8381 enum machine_mode cc_mode;
8383 if (GET_RTX_CLASS (code) != '<')
8386 cc_mode = GET_MODE (XEXP (op, 0));
8387 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8390 validate_condition_mode (code, cc_mode);
8395 /* Return 1 if OP is a comparison operation that is valid for a branch
8396 insn and which is true if the corresponding bit in the CC register
8400 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8404 if (! branch_comparison_operator (op, mode))
8407 code = GET_CODE (op);
8408 return (code == EQ || code == LT || code == GT
8409 || code == LTU || code == GTU
8410 || code == UNORDERED);
8413 /* Return 1 if OP is a comparison operation that is valid for an scc
8414 insn: it must be a positive comparison. */
8417 scc_comparison_operator (rtx op, enum machine_mode mode)
8419 return branch_positive_comparison_operator (op, mode);
8423 trap_comparison_operator (rtx op, enum machine_mode mode)
8425 if (mode != VOIDmode && mode != GET_MODE (op))
8427 return GET_RTX_CLASS (GET_CODE (op)) == '<';
8431 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8433 enum rtx_code code = GET_CODE (op);
8434 return (code == AND || code == IOR || code == XOR);
8438 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8440 enum rtx_code code = GET_CODE (op);
8441 return (code == IOR || code == XOR);
8445 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8447 enum rtx_code code = GET_CODE (op);
8448 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8451 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8452 mask required to convert the result of a rotate insn into a shift
8453 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8456 includes_lshift_p (rtx shiftop, rtx andop)
8458 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8460 shift_mask <<= INTVAL (shiftop);
8462 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8465 /* Similar, but for right shift. */
8468 includes_rshift_p (rtx shiftop, rtx andop)
8470 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8472 shift_mask >>= INTVAL (shiftop);
8474 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8477 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8478 to perform a left shift. It must have exactly SHIFTOP least
8479 significant 0's, then one or more 1's, then zero or more 0's. */
8482 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8484 if (GET_CODE (andop) == CONST_INT)
8486 HOST_WIDE_INT c, lsb, shift_mask;
8489 if (c == 0 || c == ~0)
8493 shift_mask <<= INTVAL (shiftop);
8495 /* Find the least significant one bit. */
8498 /* It must coincide with the LSB of the shift mask. */
8499 if (-lsb != shift_mask)
8502 /* Invert to look for the next transition (if any). */
8505 /* Remove the low group of ones (originally low group of zeros). */
8508 /* Again find the lsb, and check we have all 1's above. */
8512 else if (GET_CODE (andop) == CONST_DOUBLE
8513 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8515 HOST_WIDE_INT low, high, lsb;
8516 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8518 low = CONST_DOUBLE_LOW (andop);
8519 if (HOST_BITS_PER_WIDE_INT < 64)
8520 high = CONST_DOUBLE_HIGH (andop);
8522 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8523 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8526 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8528 shift_mask_high = ~0;
8529 if (INTVAL (shiftop) > 32)
8530 shift_mask_high <<= INTVAL (shiftop) - 32;
8534 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8541 return high == -lsb;
8544 shift_mask_low = ~0;
8545 shift_mask_low <<= INTVAL (shiftop);
8549 if (-lsb != shift_mask_low)
8552 if (HOST_BITS_PER_WIDE_INT < 64)
8557 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8560 return high == -lsb;
8564 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8570 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8571 to perform a left shift. It must have SHIFTOP or more least
8572 significant 0's, with the remainder of the word 1's. */
8575 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8577 if (GET_CODE (andop) == CONST_INT)
8579 HOST_WIDE_INT c, lsb, shift_mask;
8582 shift_mask <<= INTVAL (shiftop);
8585 /* Find the least significant one bit. */
8588 /* It must be covered by the shift mask.
8589 This test also rejects c == 0. */
8590 if ((lsb & shift_mask) == 0)
8593 /* Check we have all 1's above the transition, and reject all 1's. */
8594 return c == -lsb && lsb != 1;
8596 else if (GET_CODE (andop) == CONST_DOUBLE
8597 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8599 HOST_WIDE_INT low, lsb, shift_mask_low;
8601 low = CONST_DOUBLE_LOW (andop);
8603 if (HOST_BITS_PER_WIDE_INT < 64)
8605 HOST_WIDE_INT high, shift_mask_high;
8607 high = CONST_DOUBLE_HIGH (andop);
8611 shift_mask_high = ~0;
8612 if (INTVAL (shiftop) > 32)
8613 shift_mask_high <<= INTVAL (shiftop) - 32;
8617 if ((lsb & shift_mask_high) == 0)
8620 return high == -lsb;
8626 shift_mask_low = ~0;
8627 shift_mask_low <<= INTVAL (shiftop);
8631 if ((lsb & shift_mask_low) == 0)
8634 return low == -lsb && lsb != 1;
8640 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8641 for lfq and stfq insns.
8643 Note reg1 and reg2 *must* be hard registers. To be sure we will
8644 abort if we are passed pseudo registers. */
8647 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8649 /* We might have been passed a SUBREG. */
8650 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8653 return (REGNO (reg1) == REGNO (reg2) - 1);
8656 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8657 addr1 and addr2 must be in consecutive memory locations
8658 (addr2 == addr1 + 8). */
8661 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8666 /* Extract an offset (if used) from the first addr. */
8667 if (GET_CODE (addr1) == PLUS)
8669 /* If not a REG, return zero. */
8670 if (GET_CODE (XEXP (addr1, 0)) != REG)
8674 reg1 = REGNO (XEXP (addr1, 0));
8675 /* The offset must be constant! */
8676 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8678 offset1 = INTVAL (XEXP (addr1, 1));
8681 else if (GET_CODE (addr1) != REG)
8685 reg1 = REGNO (addr1);
8686 /* This was a simple (mem (reg)) expression. Offset is 0. */
8690 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8691 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8692 register as addr1. */
8693 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8695 if (GET_CODE (addr2) != PLUS)
8698 if (GET_CODE (XEXP (addr2, 0)) != REG
8699 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8702 if (reg1 != REGNO (XEXP (addr2, 0)))
8705 /* The offset for the second addr must be 8 more than the first addr. */
8706 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8709 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8714 /* Return the register class of a scratch register needed to copy IN into
8715 or out of a register in CLASS in MODE. If it can be done directly,
8716 NO_REGS is returned. */
8719 secondary_reload_class (enum reg_class class,
8720 enum machine_mode mode,
8725 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8727 && MACHOPIC_INDIRECT
8731 /* We cannot copy a symbolic operand directly into anything
8732 other than BASE_REGS for TARGET_ELF. So indicate that a
8733 register from BASE_REGS is needed as an intermediate
8736 On Darwin, pic addresses require a load from memory, which
8737 needs a base register. */
8738 if (class != BASE_REGS
8739 && (GET_CODE (in) == SYMBOL_REF
8740 || GET_CODE (in) == HIGH
8741 || GET_CODE (in) == LABEL_REF
8742 || GET_CODE (in) == CONST))
8746 if (GET_CODE (in) == REG)
8749 if (regno >= FIRST_PSEUDO_REGISTER)
8751 regno = true_regnum (in);
8752 if (regno >= FIRST_PSEUDO_REGISTER)
8756 else if (GET_CODE (in) == SUBREG)
8758 regno = true_regnum (in);
8759 if (regno >= FIRST_PSEUDO_REGISTER)
8765 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8767 if (class == GENERAL_REGS || class == BASE_REGS
8768 || (regno >= 0 && INT_REGNO_P (regno)))
8771 /* Constants, memory, and FP registers can go into FP registers. */
8772 if ((regno == -1 || FP_REGNO_P (regno))
8773 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8776 /* Memory, and AltiVec registers can go into AltiVec registers. */
8777 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8778 && class == ALTIVEC_REGS)
8781 /* We can copy among the CR registers. */
8782 if ((class == CR_REGS || class == CR0_REGS)
8783 && regno >= 0 && CR_REGNO_P (regno))
8786 /* Otherwise, we need GENERAL_REGS. */
8787 return GENERAL_REGS;
8790 /* Given a comparison operation, return the bit number in CCR to test. We
8791 know this is a valid comparison.
8793 SCC_P is 1 if this is for an scc. That means that %D will have been
8794 used instead of %C, so the bits will be in different places.
8796 Return -1 if OP isn't a valid comparison for some reason. */
8799 ccr_bit (rtx op, int scc_p)
8801 enum rtx_code code = GET_CODE (op);
8802 enum machine_mode cc_mode;
8807 if (GET_RTX_CLASS (code) != '<')
8812 if (GET_CODE (reg) != REG
8813 || ! CR_REGNO_P (REGNO (reg)))
8816 cc_mode = GET_MODE (reg);
8817 cc_regnum = REGNO (reg);
8818 base_bit = 4 * (cc_regnum - CR0_REGNO);
8820 validate_condition_mode (code, cc_mode);
8822 /* When generating a sCOND operation, only positive conditions are
8824 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8825 && code != GTU && code != LTU)
8831 return scc_p ? base_bit + 3 : base_bit + 2;
8833 return base_bit + 2;
8834 case GT: case GTU: case UNLE:
8835 return base_bit + 1;
8836 case LT: case LTU: case UNGE:
8838 case ORDERED: case UNORDERED:
8839 return base_bit + 3;
8842 /* If scc, we will have done a cror to put the bit in the
8843 unordered position. So test that bit. For integer, this is ! LT
8844 unless this is an scc insn. */
8845 return scc_p ? base_bit + 3 : base_bit;
8848 return scc_p ? base_bit + 3 : base_bit + 1;
8855 /* Return the GOT register. */
8858 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8860 /* The second flow pass currently (June 1999) can't update
8861 regs_ever_live without disturbing other parts of the compiler, so
8862 update it here to make the prolog/epilogue code happy. */
8863 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8864 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8866 current_function_uses_pic_offset_table = 1;
8868 return pic_offset_table_rtx;
8871 /* Function to init struct machine_function.
8872 This will be called, via a pointer variable,
8873 from push_function_context. */
8875 static struct machine_function *
8876 rs6000_init_machine_status (void)
8878 return ggc_alloc_cleared (sizeof (machine_function));
8881 /* These macros test for integers and extract the low-order bits. */
8883 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8884 && GET_MODE (X) == VOIDmode)
8886 #define INT_LOWPART(X) \
8887 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8893 unsigned long val = INT_LOWPART (op);
8895 /* If the high bit is zero, the value is the first 1 bit we find
8897 if ((val & 0x80000000) == 0)
8899 if ((val & 0xffffffff) == 0)
8903 while (((val <<= 1) & 0x80000000) == 0)
8908 /* If the high bit is set and the low bit is not, or the mask is all
8909 1's, the value is zero. */
8910 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8913 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8916 while (((val >>= 1) & 1) != 0)
8926 unsigned long val = INT_LOWPART (op);
8928 /* If the low bit is zero, the value is the first 1 bit we find from
8932 if ((val & 0xffffffff) == 0)
8936 while (((val >>= 1) & 1) == 0)
8942 /* If the low bit is set and the high bit is not, or the mask is all
8943 1's, the value is 31. */
8944 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8947 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8950 while (((val <<= 1) & 0x80000000) != 0)
8956 /* Locate some local-dynamic symbol still in use by this function
8957 so that we can print its name in some tls_ld pattern. */
8960 rs6000_get_some_local_dynamic_name (void)
8964 if (cfun->machine->some_ld_name)
8965 return cfun->machine->some_ld_name;
8967 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8969 && for_each_rtx (&PATTERN (insn),
8970 rs6000_get_some_local_dynamic_name_1, 0))
8971 return cfun->machine->some_ld_name;
8976 /* Helper function for rs6000_get_some_local_dynamic_name. */
8979 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8983 if (GET_CODE (x) == SYMBOL_REF)
8985 const char *str = XSTR (x, 0);
8986 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8988 cfun->machine->some_ld_name = str;
8996 /* Print an operand. Recognize special options, documented below. */
8999 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9000 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9002 #define SMALL_DATA_RELOC "sda21"
9003 #define SMALL_DATA_REG 0
9007 print_operand (FILE *file, rtx x, int code)
9011 unsigned HOST_WIDE_INT uval;
9016 /* Write out an instruction after the call which may be replaced
9017 with glue code by the loader. This depends on the AIX version. */
9018 asm_fprintf (file, RS6000_CALL_GLUE);
9021 /* %a is output_address. */
9024 /* If X is a constant integer whose low-order 5 bits are zero,
9025 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9026 in the AIX assembler where "sri" with a zero shift count
9027 writes a trash instruction. */
9028 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9035 /* If constant, low-order 16 bits of constant, unsigned.
9036 Otherwise, write normally. */
9038 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9040 print_operand (file, x, 0);
9044 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9045 for 64-bit mask direction. */
9046 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9049 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9053 /* X is a CR register. Print the number of the GT bit of the CR. */
9054 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9055 output_operand_lossage ("invalid %%E value");
9057 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9061 /* Like 'J' but get to the EQ bit. */
9062 if (GET_CODE (x) != REG)
9065 /* Bit 1 is EQ bit. */
9066 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
9068 /* If we want bit 31, write a shift count of zero, not 32. */
9069 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9073 /* X is a CR register. Print the number of the EQ bit of the CR */
9074 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9075 output_operand_lossage ("invalid %%E value");
9077 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9081 /* X is a CR register. Print the shift count needed to move it
9082 to the high-order four bits. */
9083 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9084 output_operand_lossage ("invalid %%f value");
9086 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9090 /* Similar, but print the count for the rotate in the opposite
9092 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9093 output_operand_lossage ("invalid %%F value");
9095 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9099 /* X is a constant integer. If it is negative, print "m",
9100 otherwise print "z". This is to make an aze or ame insn. */
9101 if (GET_CODE (x) != CONST_INT)
9102 output_operand_lossage ("invalid %%G value");
9103 else if (INTVAL (x) >= 0)
9110 /* If constant, output low-order five bits. Otherwise, write
9113 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9115 print_operand (file, x, 0);
9119 /* If constant, output low-order six bits. Otherwise, write
9122 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9124 print_operand (file, x, 0);
9128 /* Print `i' if this is a constant, else nothing. */
9134 /* Write the bit number in CCR for jump. */
9137 output_operand_lossage ("invalid %%j code");
9139 fprintf (file, "%d", i);
9143 /* Similar, but add one for shift count in rlinm for scc and pass
9144 scc flag to `ccr_bit'. */
9147 output_operand_lossage ("invalid %%J code");
9149 /* If we want bit 31, write a shift count of zero, not 32. */
9150 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9154 /* X must be a constant. Write the 1's complement of the
9157 output_operand_lossage ("invalid %%k value");
9159 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9163 /* X must be a symbolic constant on ELF. Write an
9164 expression suitable for an 'addi' that adds in the low 16
9166 if (GET_CODE (x) != CONST)
9168 print_operand_address (file, x);
9173 if (GET_CODE (XEXP (x, 0)) != PLUS
9174 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9175 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9176 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9177 output_operand_lossage ("invalid %%K value");
9178 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9180 /* For GNU as, there must be a non-alphanumeric character
9181 between 'l' and the number. The '-' is added by
9182 print_operand() already. */
9183 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9185 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9189 /* %l is output_asm_label. */
9192 /* Write second word of DImode or DFmode reference. Works on register
9193 or non-indexed memory only. */
9194 if (GET_CODE (x) == REG)
9195 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9196 else if (GET_CODE (x) == MEM)
9198 /* Handle possible auto-increment. Since it is pre-increment and
9199 we have already done it, we can just use an offset of word. */
9200 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9201 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9202 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9205 output_address (XEXP (adjust_address_nv (x, SImode,
9209 if (small_data_operand (x, GET_MODE (x)))
9210 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9211 reg_names[SMALL_DATA_REG]);
9216 /* MB value for a mask operand. */
9217 if (! mask_operand (x, SImode))
9218 output_operand_lossage ("invalid %%m value");
9220 fprintf (file, "%d", extract_MB (x));
9224 /* ME value for a mask operand. */
9225 if (! mask_operand (x, SImode))
9226 output_operand_lossage ("invalid %%M value");
9228 fprintf (file, "%d", extract_ME (x));
9231 /* %n outputs the negative of its operand. */
9234 /* Write the number of elements in the vector times 4. */
9235 if (GET_CODE (x) != PARALLEL)
9236 output_operand_lossage ("invalid %%N value");
9238 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9242 /* Similar, but subtract 1 first. */
9243 if (GET_CODE (x) != PARALLEL)
9244 output_operand_lossage ("invalid %%O value");
9246 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9250 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9252 || INT_LOWPART (x) < 0
9253 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9254 output_operand_lossage ("invalid %%p value");
9256 fprintf (file, "%d", i);
9260 /* The operand must be an indirect memory reference. The result
9261 is the register name. */
9262 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9263 || REGNO (XEXP (x, 0)) >= 32)
9264 output_operand_lossage ("invalid %%P value");
9266 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9270 /* This outputs the logical code corresponding to a boolean
9271 expression. The expression may have one or both operands
9272 negated (if one, only the first one). For condition register
9273 logical operations, it will also treat the negated
9274 CR codes as NOTs, but not handle NOTs of them. */
9276 const char *const *t = 0;
9278 enum rtx_code code = GET_CODE (x);
9279 static const char * const tbl[3][3] = {
9280 { "and", "andc", "nor" },
9281 { "or", "orc", "nand" },
9282 { "xor", "eqv", "xor" } };
9286 else if (code == IOR)
9288 else if (code == XOR)
9291 output_operand_lossage ("invalid %%q value");
9293 if (GET_CODE (XEXP (x, 0)) != NOT)
9297 if (GET_CODE (XEXP (x, 1)) == NOT)
9315 /* X is a CR register. Print the mask for `mtcrf'. */
9316 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9317 output_operand_lossage ("invalid %%R value");
9319 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9323 /* Low 5 bits of 32 - value */
9325 output_operand_lossage ("invalid %%s value");
9327 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9331 /* PowerPC64 mask position. All 0's is excluded.
9332 CONST_INT 32-bit mask is considered sign-extended so any
9333 transition must occur within the CONST_INT, not on the boundary. */
9334 if (! mask64_operand (x, DImode))
9335 output_operand_lossage ("invalid %%S value");
9337 uval = INT_LOWPART (x);
9339 if (uval & 1) /* Clear Left */
9341 #if HOST_BITS_PER_WIDE_INT > 64
9342 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9346 else /* Clear Right */
9349 #if HOST_BITS_PER_WIDE_INT > 64
9350 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9358 fprintf (file, "%d", i);
9362 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9363 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9366 /* Bit 3 is OV bit. */
9367 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9369 /* If we want bit 31, write a shift count of zero, not 32. */
9370 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9374 /* Print the symbolic name of a branch target register. */
9375 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9376 && REGNO (x) != COUNT_REGISTER_REGNUM))
9377 output_operand_lossage ("invalid %%T value");
9378 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9379 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9381 fputs ("ctr", file);
9385 /* High-order 16 bits of constant for use in unsigned operand. */
9387 output_operand_lossage ("invalid %%u value");
9389 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9390 (INT_LOWPART (x) >> 16) & 0xffff);
9394 /* High-order 16 bits of constant for use in signed operand. */
9396 output_operand_lossage ("invalid %%v value");
9398 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9399 (INT_LOWPART (x) >> 16) & 0xffff);
9403 /* Print `u' if this has an auto-increment or auto-decrement. */
9404 if (GET_CODE (x) == MEM
9405 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9406 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9411 /* Print the trap code for this operand. */
9412 switch (GET_CODE (x))
9415 fputs ("eq", file); /* 4 */
9418 fputs ("ne", file); /* 24 */
9421 fputs ("lt", file); /* 16 */
9424 fputs ("le", file); /* 20 */
9427 fputs ("gt", file); /* 8 */
9430 fputs ("ge", file); /* 12 */
9433 fputs ("llt", file); /* 2 */
9436 fputs ("lle", file); /* 6 */
9439 fputs ("lgt", file); /* 1 */
9442 fputs ("lge", file); /* 5 */
9450 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9453 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9454 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9456 print_operand (file, x, 0);
9460 /* MB value for a PowerPC64 rldic operand. */
9461 val = (GET_CODE (x) == CONST_INT
9462 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9467 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9468 if ((val <<= 1) < 0)
9471 #if HOST_BITS_PER_WIDE_INT == 32
9472 if (GET_CODE (x) == CONST_INT && i >= 0)
9473 i += 32; /* zero-extend high-part was all 0's */
9474 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9476 val = CONST_DOUBLE_LOW (x);
9483 for ( ; i < 64; i++)
9484 if ((val <<= 1) < 0)
9489 fprintf (file, "%d", i + 1);
9493 if (GET_CODE (x) == MEM
9494 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9499 /* Like 'L', for third word of TImode */
9500 if (GET_CODE (x) == REG)
9501 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9502 else if (GET_CODE (x) == MEM)
9504 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9505 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9506 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9508 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9509 if (small_data_operand (x, GET_MODE (x)))
9510 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9511 reg_names[SMALL_DATA_REG]);
9516 /* X is a SYMBOL_REF. Write out the name preceded by a
9517 period and without any trailing data in brackets. Used for function
9518 names. If we are configured for System V (or the embedded ABI) on
9519 the PowerPC, do not emit the period, since those systems do not use
9520 TOCs and the like. */
9521 if (GET_CODE (x) != SYMBOL_REF)
9524 if (XSTR (x, 0)[0] != '.')
9526 switch (DEFAULT_ABI)
9541 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9543 assemble_name (file, XSTR (x, 0));
9547 /* Like 'L', for last word of TImode. */
9548 if (GET_CODE (x) == REG)
9549 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9550 else if (GET_CODE (x) == MEM)
9552 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9553 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9554 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9556 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9557 if (small_data_operand (x, GET_MODE (x)))
9558 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9559 reg_names[SMALL_DATA_REG]);
9563 /* Print AltiVec or SPE memory operand. */
9568 if (GET_CODE (x) != MEM)
9576 if (GET_CODE (tmp) == REG)
9578 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9581 /* Handle [reg+UIMM]. */
9582 else if (GET_CODE (tmp) == PLUS &&
9583 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9587 if (GET_CODE (XEXP (tmp, 0)) != REG)
9590 x = INTVAL (XEXP (tmp, 1));
9591 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9595 /* Fall through. Must be [reg+reg]. */
9597 if (GET_CODE (tmp) == REG)
9598 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9599 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9601 if (REGNO (XEXP (tmp, 0)) == 0)
9602 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9603 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9605 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9606 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9614 if (GET_CODE (x) == REG)
9615 fprintf (file, "%s", reg_names[REGNO (x)]);
9616 else if (GET_CODE (x) == MEM)
9618 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9619 know the width from the mode. */
9620 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9621 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9622 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9623 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9624 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9625 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9627 output_address (XEXP (x, 0));
9630 output_addr_const (file, x);
9634 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9638 output_operand_lossage ("invalid %%xn code");
9642 /* Print the address of an operand. */
9645 print_operand_address (FILE *file, rtx x)
9647 if (GET_CODE (x) == REG)
9648 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9649 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9650 || GET_CODE (x) == LABEL_REF)
9652 output_addr_const (file, x);
9653 if (small_data_operand (x, GET_MODE (x)))
9654 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9655 reg_names[SMALL_DATA_REG]);
9656 else if (TARGET_TOC)
9659 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9661 if (REGNO (XEXP (x, 0)) == 0)
9662 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9663 reg_names[ REGNO (XEXP (x, 0)) ]);
9665 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9666 reg_names[ REGNO (XEXP (x, 1)) ]);
9668 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9669 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9670 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9672 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9673 && CONSTANT_P (XEXP (x, 1)))
9675 output_addr_const (file, XEXP (x, 1));
9676 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9680 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9681 && CONSTANT_P (XEXP (x, 1)))
9683 fprintf (file, "lo16(");
9684 output_addr_const (file, XEXP (x, 1));
9685 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9688 else if (legitimate_constant_pool_address_p (x))
9690 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9692 rtx contains_minus = XEXP (x, 1);
9696 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9697 turn it into (sym) for output_addr_const. */
9698 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9699 contains_minus = XEXP (contains_minus, 0);
9701 minus = XEXP (contains_minus, 0);
9702 symref = XEXP (minus, 0);
9703 XEXP (contains_minus, 0) = symref;
9708 name = XSTR (symref, 0);
9709 newname = alloca (strlen (name) + sizeof ("@toc"));
9710 strcpy (newname, name);
9711 strcat (newname, "@toc");
9712 XSTR (symref, 0) = newname;
9714 output_addr_const (file, XEXP (x, 1));
9716 XSTR (symref, 0) = name;
9717 XEXP (contains_minus, 0) = minus;
9720 output_addr_const (file, XEXP (x, 1));
9722 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9728 /* Target hook for assembling integer objects. The PowerPC version has
9729 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9730 is defined. It also needs to handle DI-mode objects on 64-bit
9734 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9736 #ifdef RELOCATABLE_NEEDS_FIXUP
9737 /* Special handling for SI values. */
9738 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
9740 extern int in_toc_section (void);
9741 static int recurse = 0;
9743 /* For -mrelocatable, we mark all addresses that need to be fixed up
9744 in the .fixup section. */
9745 if (TARGET_RELOCATABLE
9746 && !in_toc_section ()
9747 && !in_text_section ()
9749 && GET_CODE (x) != CONST_INT
9750 && GET_CODE (x) != CONST_DOUBLE
9756 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9758 ASM_OUTPUT_LABEL (asm_out_file, buf);
9759 fprintf (asm_out_file, "\t.long\t(");
9760 output_addr_const (asm_out_file, x);
9761 fprintf (asm_out_file, ")@fixup\n");
9762 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9763 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9764 fprintf (asm_out_file, "\t.long\t");
9765 assemble_name (asm_out_file, buf);
9766 fprintf (asm_out_file, "\n\t.previous\n");
9770 /* Remove initial .'s to turn a -mcall-aixdesc function
9771 address into the address of the descriptor, not the function
9773 else if (GET_CODE (x) == SYMBOL_REF
9774 && XSTR (x, 0)[0] == '.'
9775 && DEFAULT_ABI == ABI_AIX)
9777 const char *name = XSTR (x, 0);
9778 while (*name == '.')
9781 fprintf (asm_out_file, "\t.long\t%s\n", name);
9785 #endif /* RELOCATABLE_NEEDS_FIXUP */
9786 return default_assemble_integer (x, size, aligned_p);
9789 #ifdef HAVE_GAS_HIDDEN
9790 /* Emit an assembler directive to set symbol visibility for DECL to
9794 rs6000_assemble_visibility (tree decl, int vis)
9796 /* Functions need to have their entry point symbol visibility set as
9797 well as their descriptor symbol visibility. */
9798 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9800 static const char * const visibility_types[] = {
9801 NULL, "internal", "hidden", "protected"
9804 const char *name, *type;
9806 name = ((* targetm.strip_name_encoding)
9807 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9808 type = visibility_types[vis];
9810 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9811 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9814 default_assemble_visibility (decl, vis);
9819 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9821 /* Reversal of FP compares takes care -- an ordered compare
9822 becomes an unordered compare and vice versa. */
9823 if (mode == CCFPmode
9824 && (!flag_finite_math_only
9825 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9826 || code == UNEQ || code == LTGT))
9827 return reverse_condition_maybe_unordered (code);
9829 return reverse_condition (code);
9832 /* Generate a compare for CODE. Return a brand-new rtx that
9833 represents the result of the compare. */
9836 rs6000_generate_compare (enum rtx_code code)
9838 enum machine_mode comp_mode;
9841 if (rs6000_compare_fp_p)
9842 comp_mode = CCFPmode;
9843 else if (code == GTU || code == LTU
9844 || code == GEU || code == LEU)
9845 comp_mode = CCUNSmode;
9849 /* First, the compare. */
9850 compare_result = gen_reg_rtx (comp_mode);
9852 /* SPE FP compare instructions on the GPRs. Yuck! */
9853 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9854 && rs6000_compare_fp_p)
9856 rtx cmp, or1, or2, or_result, compare_result2;
9858 /* Note: The E500 comparison instructions set the GT bit (x +
9859 1), on success. This explains the mess. */
9863 case EQ: case UNEQ: case NE: case LTGT:
9864 cmp = flag_finite_math_only
9865 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9867 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9868 rs6000_compare_op1);
9870 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
9871 cmp = flag_finite_math_only
9872 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9874 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9875 rs6000_compare_op1);
9877 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
9878 cmp = flag_finite_math_only
9879 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9881 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9882 rs6000_compare_op1);
9888 /* Synthesize LE and GE from LT/GT || EQ. */
9889 if (code == LE || code == GE || code == LEU || code == GEU)
9895 case LE: code = LT; break;
9896 case GE: code = GT; break;
9897 case LEU: code = LT; break;
9898 case GEU: code = GT; break;
9902 or1 = gen_reg_rtx (SImode);
9903 or2 = gen_reg_rtx (SImode);
9904 or_result = gen_reg_rtx (CCEQmode);
9905 compare_result2 = gen_reg_rtx (CCFPmode);
9908 cmp = flag_finite_math_only
9909 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9911 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9912 rs6000_compare_op1);
9915 or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
9916 or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
9918 /* OR them together. */
9919 cmp = gen_rtx_SET (VOIDmode, or_result,
9920 gen_rtx_COMPARE (CCEQmode,
9921 gen_rtx_IOR (SImode, or1, or2),
9923 compare_result = or_result;
9928 if (code == NE || code == LTGT)
9938 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
9939 CLOBBERs to match cmptf_internal2 pattern. */
9940 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
9941 && GET_MODE (rs6000_compare_op0) == TFmode
9942 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
9943 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
9944 emit_insn (gen_rtx_PARALLEL (VOIDmode,
9946 gen_rtx_SET (VOIDmode,
9948 gen_rtx_COMPARE (comp_mode,
9950 rs6000_compare_op1)),
9951 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9952 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9953 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9954 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9955 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9956 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9957 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
9958 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
9960 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9961 gen_rtx_COMPARE (comp_mode,
9963 rs6000_compare_op1)));
9966 /* Some kinds of FP comparisons need an OR operation;
9967 under flag_finite_math_only we don't bother. */
9968 if (rs6000_compare_fp_p
9969 && ! flag_finite_math_only
9970 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9971 && (code == LE || code == GE
9972 || code == UNEQ || code == LTGT
9973 || code == UNGT || code == UNLT))
9975 enum rtx_code or1, or2;
9976 rtx or1_rtx, or2_rtx, compare2_rtx;
9977 rtx or_result = gen_reg_rtx (CCEQmode);
9981 case LE: or1 = LT; or2 = EQ; break;
9982 case GE: or1 = GT; or2 = EQ; break;
9983 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9984 case LTGT: or1 = LT; or2 = GT; break;
9985 case UNGT: or1 = UNORDERED; or2 = GT; break;
9986 case UNLT: or1 = UNORDERED; or2 = LT; break;
9989 validate_condition_mode (or1, comp_mode);
9990 validate_condition_mode (or2, comp_mode);
9991 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9992 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9993 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9994 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9996 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9998 compare_result = or_result;
10002 validate_condition_mode (code, GET_MODE (compare_result));
10004 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
10008 /* Emit the RTL for an sCOND pattern. */
10011 rs6000_emit_sCOND (enum rtx_code code, rtx result)
10014 enum machine_mode op_mode;
10015 enum rtx_code cond_code;
10017 condition_rtx = rs6000_generate_compare (code);
10018 cond_code = GET_CODE (condition_rtx);
10020 if (TARGET_E500 && rs6000_compare_fp_p
10021 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10025 PUT_MODE (condition_rtx, SImode);
10026 t = XEXP (condition_rtx, 0);
10028 if (cond_code != NE && cond_code != EQ)
10031 if (cond_code == NE)
10032 emit_insn (gen_e500_flip_eq_bit (t, t));
10034 emit_insn (gen_move_from_CR_eq_bit (result, t));
10038 if (cond_code == NE
10039 || cond_code == GE || cond_code == LE
10040 || cond_code == GEU || cond_code == LEU
10041 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10043 rtx not_result = gen_reg_rtx (CCEQmode);
10044 rtx not_op, rev_cond_rtx;
10045 enum machine_mode cc_mode;
10047 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10049 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
10050 SImode, XEXP (condition_rtx, 0), const0_rtx);
10051 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10052 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10053 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10056 op_mode = GET_MODE (rs6000_compare_op0);
10057 if (op_mode == VOIDmode)
10058 op_mode = GET_MODE (rs6000_compare_op1);
10060 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10062 PUT_MODE (condition_rtx, DImode);
10063 convert_move (result, condition_rtx, 0);
10067 PUT_MODE (condition_rtx, SImode);
10068 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10072 /* Emit a branch of kind CODE to location LOC. */
10075 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10077 rtx condition_rtx, loc_ref;
10079 condition_rtx = rs6000_generate_compare (code);
10080 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10081 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10082 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10083 loc_ref, pc_rtx)));
10086 /* Return the string to output a conditional branch to LABEL, which is
10087 the operand number of the label, or -1 if the branch is really a
10088 conditional return.
10090 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10091 condition code register and its mode specifies what kind of
10092 comparison we made.
10094 REVERSED is nonzero if we should reverse the sense of the comparison.
10096 INSN is the insn. */
10099 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10101 static char string[64];
10102 enum rtx_code code = GET_CODE (op);
10103 rtx cc_reg = XEXP (op, 0);
10104 enum machine_mode mode = GET_MODE (cc_reg);
10105 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10106 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10107 int really_reversed = reversed ^ need_longbranch;
10113 validate_condition_mode (code, mode);
10115 /* Work out which way this really branches. We could use
10116 reverse_condition_maybe_unordered here always but this
10117 makes the resulting assembler clearer. */
10118 if (really_reversed)
10120 /* Reversal of FP compares takes care -- an ordered compare
10121 becomes an unordered compare and vice versa. */
10122 if (mode == CCFPmode)
10123 code = reverse_condition_maybe_unordered (code);
10125 code = reverse_condition (code);
10128 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10130 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10133 /* Opposite of GT. */
10135 else if (code == NE)
10143 /* Not all of these are actually distinct opcodes, but
10144 we distinguish them for clarity of the resulting assembler. */
10145 case NE: case LTGT:
10146 ccode = "ne"; break;
10147 case EQ: case UNEQ:
10148 ccode = "eq"; break;
10150 ccode = "ge"; break;
10151 case GT: case GTU: case UNGT:
10152 ccode = "gt"; break;
10154 ccode = "le"; break;
10155 case LT: case LTU: case UNLT:
10156 ccode = "lt"; break;
10157 case UNORDERED: ccode = "un"; break;
10158 case ORDERED: ccode = "nu"; break;
10159 case UNGE: ccode = "nl"; break;
10160 case UNLE: ccode = "ng"; break;
10165 /* Maybe we have a guess as to how likely the branch is.
10166 The old mnemonics don't have a way to specify this information. */
10168 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10169 if (note != NULL_RTX)
10171 /* PROB is the difference from 50%. */
10172 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10174 /* Only hint for highly probable/improbable branches on newer
10175 cpus as static prediction overrides processor dynamic
10176 prediction. For older cpus we may as well always hint, but
10177 assume not taken for branches that are very close to 50% as a
10178 mispredicted taken branch is more expensive than a
10179 mispredicted not-taken branch. */
10180 if (rs6000_always_hint
10181 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10183 if (abs (prob) > REG_BR_PROB_BASE / 20
10184 && ((prob > 0) ^ need_longbranch))
10192 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10194 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10196 /* We need to escape any '%' characters in the reg_names string.
10197 Assume they'd only be the first character.... */
10198 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10200 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10204 /* If the branch distance was too far, we may have to use an
10205 unconditional branch to go the distance. */
10206 if (need_longbranch)
10207 s += sprintf (s, ",$+8\n\tb %s", label);
10209 s += sprintf (s, ",%s", label);
10215 /* Return the string to flip the EQ bit on a CR. */
10217 output_e500_flip_eq_bit (rtx dst, rtx src)
10219 static char string[64];
10222 if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
10223 || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
10227 a = 4 * (REGNO (dst) - CR0_REGNO) + 2;
10228 b = 4 * (REGNO (src) - CR0_REGNO) + 2;
10230 sprintf (string, "crnot %d,%d", a, b);
10234 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
10235 operands of the last comparison is nonzero/true, FALSE_COND if it
10236 is zero/false. Return 0 if the hardware has no such operation. */
10239 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10241 enum rtx_code code = GET_CODE (op);
10242 rtx op0 = rs6000_compare_op0;
10243 rtx op1 = rs6000_compare_op1;
10244 REAL_VALUE_TYPE c1;
10245 enum machine_mode compare_mode = GET_MODE (op0);
10246 enum machine_mode result_mode = GET_MODE (dest);
10249 /* These modes should always match. */
10250 if (GET_MODE (op1) != compare_mode
10251 /* In the isel case however, we can use a compare immediate, so
10252 op1 may be a small constant. */
10253 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
10255 if (GET_MODE (true_cond) != result_mode)
10257 if (GET_MODE (false_cond) != result_mode)
10260 /* First, work out if the hardware can do this at all, or
10261 if it's too slow.... */
10262 if (! rs6000_compare_fp_p)
10265 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
10268 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
10269 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
10272 /* Eliminate half of the comparisons by switching operands, this
10273 makes the remaining code simpler. */
10274 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
10275 || code == LTGT || code == LT || code == UNLE)
10277 code = reverse_condition_maybe_unordered (code);
10279 true_cond = false_cond;
10283 /* UNEQ and LTGT take four instructions for a comparison with zero,
10284 it'll probably be faster to use a branch here too. */
10285 if (code == UNEQ && HONOR_NANS (compare_mode))
10288 if (GET_CODE (op1) == CONST_DOUBLE)
10289 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
10291 /* We're going to try to implement comparisons by performing
10292 a subtract, then comparing against zero. Unfortunately,
10293 Inf - Inf is NaN which is not zero, and so if we don't
10294 know that the operand is finite and the comparison
10295 would treat EQ different to UNORDERED, we can't do it. */
10296 if (HONOR_INFINITIES (compare_mode)
10297 && code != GT && code != UNGE
10298 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
10299 /* Constructs of the form (a OP b ? a : b) are safe. */
10300 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
10301 || (! rtx_equal_p (op0, true_cond)
10302 && ! rtx_equal_p (op1, true_cond))))
10304 /* At this point we know we can use fsel. */
10306 /* Reduce the comparison to a comparison against zero. */
10307 temp = gen_reg_rtx (compare_mode);
10308 emit_insn (gen_rtx_SET (VOIDmode, temp,
10309 gen_rtx_MINUS (compare_mode, op0, op1)));
10311 op1 = CONST0_RTX (compare_mode);
10313 /* If we don't care about NaNs we can reduce some of the comparisons
10314 down to faster ones. */
10315 if (! HONOR_NANS (compare_mode))
10321 true_cond = false_cond;
10334 /* Now, reduce everything down to a GE. */
10341 temp = gen_reg_rtx (compare_mode);
10342 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10347 temp = gen_reg_rtx (compare_mode);
10348 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
10353 temp = gen_reg_rtx (compare_mode);
10354 emit_insn (gen_rtx_SET (VOIDmode, temp,
10355 gen_rtx_NEG (compare_mode,
10356 gen_rtx_ABS (compare_mode, op0))));
10361 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10362 temp = gen_reg_rtx (result_mode);
10363 emit_insn (gen_rtx_SET (VOIDmode, temp,
10364 gen_rtx_IF_THEN_ELSE (result_mode,
10365 gen_rtx_GE (VOIDmode,
10367 true_cond, false_cond)));
10368 false_cond = true_cond;
10371 temp = gen_reg_rtx (compare_mode);
10372 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10377 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10378 temp = gen_reg_rtx (result_mode);
10379 emit_insn (gen_rtx_SET (VOIDmode, temp,
10380 gen_rtx_IF_THEN_ELSE (result_mode,
10381 gen_rtx_GE (VOIDmode,
10383 true_cond, false_cond)));
10384 true_cond = false_cond;
10387 temp = gen_reg_rtx (compare_mode);
10388 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10396 emit_insn (gen_rtx_SET (VOIDmode, dest,
10397 gen_rtx_IF_THEN_ELSE (result_mode,
10398 gen_rtx_GE (VOIDmode,
10400 true_cond, false_cond)));
10404 /* Same as above, but for ints (isel). */
10407 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10409 rtx condition_rtx, cr;
10411 /* All isel implementations thus far are 32-bits. */
10412 if (GET_MODE (rs6000_compare_op0) != SImode)
10415 /* We still have to do the compare, because isel doesn't do a
10416 compare, it just looks at the CRx bits set by a previous compare
10418 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10419 cr = XEXP (condition_rtx, 0);
10421 if (GET_MODE (cr) == CCmode)
10422 emit_insn (gen_isel_signed (dest, condition_rtx,
10423 true_cond, false_cond, cr));
10425 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10426 true_cond, false_cond, cr));
10432 output_isel (rtx *operands)
10434 enum rtx_code code;
10436 code = GET_CODE (operands[1]);
10437 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10439 PUT_CODE (operands[1], reverse_condition (code));
10440 return "isel %0,%3,%2,%j1";
10443 return "isel %0,%2,%3,%j1";
10447 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10449 enum machine_mode mode = GET_MODE (op0);
10453 if (code == SMAX || code == SMIN)
10458 if (code == SMAX || code == UMAX)
10459 target = emit_conditional_move (dest, c, op0, op1, mode,
10460 op0, op1, mode, 0);
10462 target = emit_conditional_move (dest, c, op0, op1, mode,
10463 op1, op0, mode, 0);
10464 if (target == NULL_RTX)
10466 if (target != dest)
10467 emit_move_insn (dest, target);
10470 /* Emit instructions to move SRC to DST. Called by splitters for
10471 multi-register moves. It will emit at most one instruction for
10472 each register that is accessed; that is, it won't emit li/lis pairs
10473 (or equivalent for 64-bit code). One of SRC or DST must be a hard
10477 rs6000_split_multireg_move (rtx dst, rtx src)
10479 /* The register number of the first register being moved. */
10481 /* The mode that is to be moved. */
10482 enum machine_mode mode;
10483 /* The mode that the move is being done in, and its size. */
10484 enum machine_mode reg_mode;
10486 /* The number of registers that will be moved. */
10489 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10490 mode = GET_MODE (dst);
10491 nregs = HARD_REGNO_NREGS (reg, mode);
10492 if (FP_REGNO_P (reg))
10494 else if (ALTIVEC_REGNO_P (reg))
10495 reg_mode = V16QImode;
10497 reg_mode = word_mode;
10498 reg_mode_size = GET_MODE_SIZE (reg_mode);
10500 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10503 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10505 /* Move register range backwards, if we might have destructive
10508 for (i = nregs - 1; i >= 0; i--)
10509 emit_insn (gen_rtx_SET (VOIDmode,
10510 simplify_gen_subreg (reg_mode, dst, mode,
10511 i * reg_mode_size),
10512 simplify_gen_subreg (reg_mode, src, mode,
10513 i * reg_mode_size)));
10519 bool used_update = false;
10521 if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10525 if (GET_CODE (XEXP (src, 0)) == PRE_INC
10526 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10529 breg = XEXP (XEXP (src, 0), 0);
10530 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
10531 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10532 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10533 emit_insn (TARGET_32BIT
10534 ? gen_addsi3 (breg, breg, delta_rtx)
10535 : gen_adddi3 (breg, breg, delta_rtx));
10536 src = gen_rtx_MEM (mode, breg);
10538 else if (! offsettable_memref_p (src))
10540 rtx newsrc, basereg;
10541 basereg = gen_rtx_REG (Pmode, reg);
10542 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
10543 newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
10544 MEM_COPY_ATTRIBUTES (newsrc, src);
10548 breg = XEXP (src, 0);
10549 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
10550 breg = XEXP (breg, 0);
10552 /* If the base register we are using to address memory is
10553 also a destination reg, then change that register last. */
10555 && REGNO (breg) >= REGNO (dst)
10556 && REGNO (breg) < REGNO (dst) + nregs)
10557 j = REGNO (breg) - REGNO (dst);
10560 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10564 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10565 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10568 breg = XEXP (XEXP (dst, 0), 0);
10569 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10570 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10571 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10573 /* We have to update the breg before doing the store.
10574 Use store with update, if available. */
10578 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10579 emit_insn (TARGET_32BIT
10580 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10581 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10582 used_update = true;
10585 emit_insn (TARGET_32BIT
10586 ? gen_addsi3 (breg, breg, delta_rtx)
10587 : gen_adddi3 (breg, breg, delta_rtx));
10588 dst = gen_rtx_MEM (mode, breg);
10590 else if (! offsettable_memref_p (dst))
10594 for (i = 0; i < nregs; i++)
10596 /* Calculate index to next subword. */
10601 /* If compiler already emitted move of first word by
10602 store with update, no need to do anything. */
10603 if (j == 0 && used_update)
10606 emit_insn (gen_rtx_SET (VOIDmode,
10607 simplify_gen_subreg (reg_mode, dst, mode,
10608 j * reg_mode_size),
10609 simplify_gen_subreg (reg_mode, src, mode,
10610 j * reg_mode_size)));
10616 /* This page contains routines that are used to determine what the
10617 function prologue and epilogue code will do and write them out. */
10619 /* Return the first fixed-point register that is required to be
10620 saved. 32 if none. */
10623 first_reg_to_save (void)
10627 /* Find lowest numbered live register. */
10628 for (first_reg = 13; first_reg <= 31; first_reg++)
10629 if (regs_ever_live[first_reg]
10630 && (! call_used_regs[first_reg]
10631 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10632 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10633 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
10634 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
10639 && current_function_uses_pic_offset_table
10640 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10641 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10647 /* Similar, for FP regs. */
10650 first_fp_reg_to_save (void)
10654 /* Find lowest numbered live register. */
10655 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10656 if (regs_ever_live[first_reg])
10662 /* Similar, for AltiVec regs. */
10665 first_altivec_reg_to_save (void)
10669 /* Stack frame remains as is unless we are in AltiVec ABI. */
10670 if (! TARGET_ALTIVEC_ABI)
10671 return LAST_ALTIVEC_REGNO + 1;
10673 /* Find lowest numbered live register. */
10674 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10675 if (regs_ever_live[i])
10681 /* Return a 32-bit mask of the AltiVec registers we need to set in
10682 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10683 the 32-bit word is 0. */
10685 static unsigned int
10686 compute_vrsave_mask (void)
10688 unsigned int i, mask = 0;
10690 /* First, find out if we use _any_ altivec registers. */
10691 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10692 if (regs_ever_live[i])
10693 mask |= ALTIVEC_REG_BIT (i);
10698 /* Next, remove the argument registers from the set. These must
10699 be in the VRSAVE mask set by the caller, so we don't need to add
10700 them in again. More importantly, the mask we compute here is
10701 used to generate CLOBBERs in the set_vrsave insn, and we do not
10702 wish the argument registers to die. */
10703 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10704 mask &= ~ALTIVEC_REG_BIT (i);
10706 /* Similarly, remove the return value from the set. */
10709 diddle_return_value (is_altivec_return_reg, &yes);
10711 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10718 is_altivec_return_reg (rtx reg, void *xyes)
10720 bool *yes = (bool *) xyes;
10721 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10726 /* Calculate the stack information for the current function. This is
10727 complicated by having two separate calling sequences, the AIX calling
10728 sequence and the V.4 calling sequence.
10730 AIX (and Darwin/Mac OS X) stack frames look like:
10732 SP----> +---------------------------------------+
10733 | back chain to caller | 0 0
10734 +---------------------------------------+
10735 | saved CR | 4 8 (8-11)
10736 +---------------------------------------+
10738 +---------------------------------------+
10739 | reserved for compilers | 12 24
10740 +---------------------------------------+
10741 | reserved for binders | 16 32
10742 +---------------------------------------+
10743 | saved TOC pointer | 20 40
10744 +---------------------------------------+
10745 | Parameter save area (P) | 24 48
10746 +---------------------------------------+
10747 | Alloca space (A) | 24+P etc.
10748 +---------------------------------------+
10749 | Local variable space (L) | 24+P+A
10750 +---------------------------------------+
10751 | Float/int conversion temporary (X) | 24+P+A+L
10752 +---------------------------------------+
10753 | Save area for AltiVec registers (W) | 24+P+A+L+X
10754 +---------------------------------------+
10755 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10756 +---------------------------------------+
10757 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10758 +---------------------------------------+
10759 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10760 +---------------------------------------+
10761 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10762 +---------------------------------------+
10763 old SP->| back chain to caller's caller |
10764 +---------------------------------------+
10766 The required alignment for AIX configurations is two words (i.e., 8
10770 V.4 stack frames look like:
10772 SP----> +---------------------------------------+
10773 | back chain to caller | 0
10774 +---------------------------------------+
10775 | caller's saved LR | 4
10776 +---------------------------------------+
10777 | Parameter save area (P) | 8
10778 +---------------------------------------+
10779 | Alloca space (A) | 8+P
10780 +---------------------------------------+
10781 | Varargs save area (V) | 8+P+A
10782 +---------------------------------------+
10783 | Local variable space (L) | 8+P+A+V
10784 +---------------------------------------+
10785 | Float/int conversion temporary (X) | 8+P+A+V+L
10786 +---------------------------------------+
10787 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10788 +---------------------------------------+
10789 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10790 +---------------------------------------+
10791 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10792 +---------------------------------------+
10793 | SPE: area for 64-bit GP registers |
10794 +---------------------------------------+
10795 | SPE alignment padding |
10796 +---------------------------------------+
10797 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10798 +---------------------------------------+
10799 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10800 +---------------------------------------+
10801 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10802 +---------------------------------------+
10803 old SP->| back chain to caller's caller |
10804 +---------------------------------------+
10806 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10807 given. (But note below and in sysv4.h that we require only 8 and
10808 may round up the size of our stack frame anyways. The historical
10809 reason is early versions of powerpc-linux which didn't properly
10810 align the stack at program startup. A happy side-effect is that
10811 -mno-eabi libraries can be used with -meabi programs.)
10813 The EABI configuration defaults to the V.4 layout. However,
10814 the stack alignment requirements may differ. If -mno-eabi is not
10815 given, the required stack alignment is 8 bytes; if -mno-eabi is
10816 given, the required alignment is 16 bytes. (But see V.4 comment
10819 #ifndef ABI_STACK_BOUNDARY
10820 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10823 static rs6000_stack_t *
10824 rs6000_stack_info (void)
10826 static rs6000_stack_t info, zero_info;
10827 rs6000_stack_t *info_ptr = &info;
10828 int reg_size = TARGET_32BIT ? 4 : 8;
10831 HOST_WIDE_INT non_fixed_size;
10833 /* Zero all fields portably. */
10838 /* Cache value so we don't rescan instruction chain over and over. */
10839 if (cfun->machine->insn_chain_scanned_p == 0)
10841 cfun->machine->insn_chain_scanned_p = 1;
10842 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10846 /* Select which calling sequence. */
10847 info_ptr->abi = DEFAULT_ABI;
10849 /* Calculate which registers need to be saved & save area size. */
10850 info_ptr->first_gp_reg_save = first_reg_to_save ();
10851 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10852 even if it currently looks like we won't. */
10853 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10854 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10855 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10856 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10857 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10859 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10861 /* For the SPE, we have an additional upper 32-bits on each GPR.
10862 Ideally we should save the entire 64-bits only when the upper
10863 half is used in SIMD instructions. Since we only record
10864 registers live (not the size they are used in), this proves
10865 difficult because we'd have to traverse the instruction chain at
10866 the right time, taking reload into account. This is a real pain,
10867 so we opt to save the GPRs in 64-bits always if but one register
10868 gets used in 64-bits. Otherwise, all the registers in the frame
10869 get saved in 32-bits.
10871 So... since when we save all GPRs (except the SP) in 64-bits, the
10872 traditional GP save area will be empty. */
10873 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10874 info_ptr->gp_size = 0;
10876 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10877 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10879 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10880 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10881 - info_ptr->first_altivec_reg_save);
10883 /* Does this function call anything? */
10884 info_ptr->calls_p = (! current_function_is_leaf
10885 || cfun->machine->ra_needs_full_frame);
10887 /* Determine if we need to save the link register. */
10888 if (rs6000_ra_ever_killed ()
10889 || (DEFAULT_ABI == ABI_AIX
10890 && current_function_profile
10891 && !TARGET_PROFILE_KERNEL)
10892 #ifdef TARGET_RELOCATABLE
10893 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10895 || (info_ptr->first_fp_reg_save != 64
10896 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10897 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10898 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10899 || (DEFAULT_ABI == ABI_DARWIN
10901 && current_function_uses_pic_offset_table)
10902 || info_ptr->calls_p)
10904 info_ptr->lr_save_p = 1;
10905 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10908 /* Determine if we need to save the condition code registers. */
10909 if (regs_ever_live[CR2_REGNO]
10910 || regs_ever_live[CR3_REGNO]
10911 || regs_ever_live[CR4_REGNO])
10913 info_ptr->cr_save_p = 1;
10914 if (DEFAULT_ABI == ABI_V4)
10915 info_ptr->cr_size = reg_size;
10918 /* If the current function calls __builtin_eh_return, then we need
10919 to allocate stack space for registers that will hold data for
10920 the exception handler. */
10921 if (current_function_calls_eh_return)
10924 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10927 /* SPE saves EH registers in 64-bits. */
10928 ehrd_size = i * (TARGET_SPE_ABI
10929 && info_ptr->spe_64bit_regs_used != 0
10930 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10935 /* Determine various sizes. */
10936 info_ptr->reg_size = reg_size;
10937 info_ptr->fixed_size = RS6000_SAVE_AREA;
10938 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10939 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10940 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10941 TARGET_ALTIVEC ? 16 : 8);
10943 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10944 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10946 info_ptr->spe_gp_size = 0;
10948 if (TARGET_ALTIVEC_ABI)
10949 info_ptr->vrsave_mask = compute_vrsave_mask ();
10951 info_ptr->vrsave_mask = 0;
10953 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
10954 info_ptr->vrsave_size = 4;
10956 info_ptr->vrsave_size = 0;
10958 /* Calculate the offsets. */
10959 switch (DEFAULT_ABI)
10967 info_ptr->fp_save_offset = - info_ptr->fp_size;
10968 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10970 if (TARGET_ALTIVEC_ABI)
10972 info_ptr->vrsave_save_offset
10973 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10975 /* Align stack so vector save area is on a quadword boundary. */
10976 if (info_ptr->altivec_size != 0)
10977 info_ptr->altivec_padding_size
10978 = 16 - (-info_ptr->vrsave_save_offset % 16);
10980 info_ptr->altivec_padding_size = 0;
10982 info_ptr->altivec_save_offset
10983 = info_ptr->vrsave_save_offset
10984 - info_ptr->altivec_padding_size
10985 - info_ptr->altivec_size;
10987 /* Adjust for AltiVec case. */
10988 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10991 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10992 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10993 info_ptr->lr_save_offset = 2*reg_size;
10997 info_ptr->fp_save_offset = - info_ptr->fp_size;
10998 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10999 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
11001 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11003 /* Align stack so SPE GPR save area is aligned on a
11004 double-word boundary. */
11005 if (info_ptr->spe_gp_size != 0)
11006 info_ptr->spe_padding_size
11007 = 8 - (-info_ptr->cr_save_offset % 8);
11009 info_ptr->spe_padding_size = 0;
11011 info_ptr->spe_gp_save_offset
11012 = info_ptr->cr_save_offset
11013 - info_ptr->spe_padding_size
11014 - info_ptr->spe_gp_size;
11016 /* Adjust for SPE case. */
11017 info_ptr->toc_save_offset
11018 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
11020 else if (TARGET_ALTIVEC_ABI)
11022 info_ptr->vrsave_save_offset
11023 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
11025 /* Align stack so vector save area is on a quadword boundary. */
11026 if (info_ptr->altivec_size != 0)
11027 info_ptr->altivec_padding_size
11028 = 16 - (-info_ptr->vrsave_save_offset % 16);
11030 info_ptr->altivec_padding_size = 0;
11032 info_ptr->altivec_save_offset
11033 = info_ptr->vrsave_save_offset
11034 - info_ptr->altivec_padding_size
11035 - info_ptr->altivec_size;
11037 /* Adjust for AltiVec case. */
11038 info_ptr->toc_save_offset
11039 = info_ptr->altivec_save_offset - info_ptr->toc_size;
11042 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
11043 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
11044 info_ptr->lr_save_offset = reg_size;
11048 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
11049 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
11050 + info_ptr->gp_size
11051 + info_ptr->altivec_size
11052 + info_ptr->altivec_padding_size
11053 + info_ptr->spe_gp_size
11054 + info_ptr->spe_padding_size
11056 + info_ptr->cr_size
11057 + info_ptr->lr_size
11058 + info_ptr->vrsave_size
11059 + info_ptr->toc_size,
11062 non_fixed_size = (info_ptr->vars_size
11063 + info_ptr->parm_size
11064 + info_ptr->save_size
11065 + info_ptr->varargs_size);
11067 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
11068 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
11070 /* Determine if we need to allocate any stack frame:
11072 For AIX we need to push the stack if a frame pointer is needed
11073 (because the stack might be dynamically adjusted), if we are
11074 debugging, if we make calls, or if the sum of fp_save, gp_save,
11075 and local variables are more than the space needed to save all
11076 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11077 + 18*8 = 288 (GPR13 reserved).
11079 For V.4 we don't have the stack cushion that AIX uses, but assume
11080 that the debugger can handle stackless frames. */
11082 if (info_ptr->calls_p)
11083 info_ptr->push_p = 1;
11085 else if (DEFAULT_ABI == ABI_V4)
11086 info_ptr->push_p = non_fixed_size != 0;
11088 else if (frame_pointer_needed)
11089 info_ptr->push_p = 1;
11091 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
11092 info_ptr->push_p = 1;
11095 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
11097 /* Zero offsets if we're not saving those registers. */
11098 if (info_ptr->fp_size == 0)
11099 info_ptr->fp_save_offset = 0;
11101 if (info_ptr->gp_size == 0)
11102 info_ptr->gp_save_offset = 0;
11104 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
11105 info_ptr->altivec_save_offset = 0;
11107 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
11108 info_ptr->vrsave_save_offset = 0;
11110 if (! TARGET_SPE_ABI
11111 || info_ptr->spe_64bit_regs_used == 0
11112 || info_ptr->spe_gp_size == 0)
11113 info_ptr->spe_gp_save_offset = 0;
11115 if (! info_ptr->lr_save_p)
11116 info_ptr->lr_save_offset = 0;
11118 if (! info_ptr->cr_save_p)
11119 info_ptr->cr_save_offset = 0;
11121 if (! info_ptr->toc_save_p)
11122 info_ptr->toc_save_offset = 0;
11127 /* Return true if the current function uses any GPRs in 64-bit SIMD
11131 spe_func_has_64bit_regs_p (void)
11135 /* Functions that save and restore all the call-saved registers will
11136 need to save/restore the registers in 64-bits. */
11137 if (current_function_calls_eh_return
11138 || current_function_calls_setjmp
11139 || current_function_has_nonlocal_goto)
11142 insns = get_insns ();
11144 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11150 i = PATTERN (insn);
11151 if (GET_CODE (i) == SET
11152 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11161 debug_stack_info (rs6000_stack_t *info)
11163 const char *abi_string;
11166 info = rs6000_stack_info ();
11168 fprintf (stderr, "\nStack information for function %s:\n",
11169 ((current_function_decl && DECL_NAME (current_function_decl))
11170 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
11175 default: abi_string = "Unknown"; break;
11176 case ABI_NONE: abi_string = "NONE"; break;
11177 case ABI_AIX: abi_string = "AIX"; break;
11178 case ABI_DARWIN: abi_string = "Darwin"; break;
11179 case ABI_V4: abi_string = "V.4"; break;
11182 fprintf (stderr, "\tABI = %5s\n", abi_string);
11184 if (TARGET_ALTIVEC_ABI)
11185 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
11187 if (TARGET_SPE_ABI)
11188 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
11190 if (info->first_gp_reg_save != 32)
11191 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
11193 if (info->first_fp_reg_save != 64)
11194 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
11196 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
11197 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
11198 info->first_altivec_reg_save);
11200 if (info->lr_save_p)
11201 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
11203 if (info->cr_save_p)
11204 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
11206 if (info->toc_save_p)
11207 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
11209 if (info->vrsave_mask)
11210 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
11213 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
11216 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
11218 if (info->gp_save_offset)
11219 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
11221 if (info->fp_save_offset)
11222 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
11224 if (info->altivec_save_offset)
11225 fprintf (stderr, "\taltivec_save_offset = %5d\n",
11226 info->altivec_save_offset);
11228 if (info->spe_gp_save_offset)
11229 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
11230 info->spe_gp_save_offset);
11232 if (info->vrsave_save_offset)
11233 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
11234 info->vrsave_save_offset);
11236 if (info->lr_save_offset)
11237 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
11239 if (info->cr_save_offset)
11240 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
11242 if (info->toc_save_offset)
11243 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
11245 if (info->varargs_save_offset)
11246 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
11248 if (info->total_size)
11249 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11252 if (info->varargs_size)
11253 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
11255 if (info->vars_size)
11256 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11259 if (info->parm_size)
11260 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
11262 if (info->fixed_size)
11263 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
11266 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
11268 if (info->spe_gp_size)
11269 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
11272 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
11274 if (info->altivec_size)
11275 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
11277 if (info->vrsave_size)
11278 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
11280 if (info->altivec_padding_size)
11281 fprintf (stderr, "\taltivec_padding_size= %5d\n",
11282 info->altivec_padding_size);
11284 if (info->spe_padding_size)
11285 fprintf (stderr, "\tspe_padding_size = %5d\n",
11286 info->spe_padding_size);
11289 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
11292 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
11294 if (info->toc_size)
11295 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
11297 if (info->save_size)
11298 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
11300 if (info->reg_size != 4)
11301 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
11303 fprintf (stderr, "\n");
11307 rs6000_return_addr (int count, rtx frame)
11309 /* Currently we don't optimize very well between prolog and body
11310 code and for PIC code the code can be actually quite bad, so
11311 don't try to be too clever here. */
11312 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11314 cfun->machine->ra_needs_full_frame = 1;
11321 plus_constant (copy_to_reg
11322 (gen_rtx_MEM (Pmode,
11323 memory_address (Pmode, frame))),
11324 RETURN_ADDRESS_OFFSET)));
11327 cfun->machine->ra_need_lr = 1;
11328 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11331 /* Say whether a function is a candidate for sibcall handling or not.
11332 We do not allow indirect calls to be optimized into sibling calls.
11333 Also, we can't do it if there are any vector parameters; there's
11334 nowhere to put the VRsave code so it works; note that functions with
11335 vector parameters are required to have a prototype, so the argument
11336 type info must be available here. (The tail recursion case can work
11337 with vector parameters, but there's no way to distinguish here.) */
11339 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11344 if (TARGET_ALTIVEC_VRSAVE)
11346 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11347 type; type = TREE_CHAIN (type))
11349 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11353 if (DEFAULT_ABI == ABI_DARWIN
11354 || (*targetm.binds_local_p) (decl))
11356 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11358 if (!lookup_attribute ("longcall", attr_list)
11359 || lookup_attribute ("shortcall", attr_list))
11367 rs6000_ra_ever_killed (void)
11373 if (current_function_is_thunk)
11376 /* regs_ever_live has LR marked as used if any sibcalls are present,
11377 but this should not force saving and restoring in the
11378 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11379 clobbers LR, so that is inappropriate. */
11381 /* Also, the prologue can generate a store into LR that
11382 doesn't really count, like this:
11385 bcl to set PIC register
11389 When we're called from the epilogue, we need to avoid counting
11390 this as a store. */
11392 push_topmost_sequence ();
11393 top = get_insns ();
11394 pop_topmost_sequence ();
11395 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11397 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11401 if (FIND_REG_INC_NOTE (insn, reg))
11403 else if (GET_CODE (insn) == CALL_INSN
11404 && !SIBLING_CALL_P (insn))
11406 else if (set_of (reg, insn) != NULL_RTX
11407 && !prologue_epilogue_contains (insn))
11414 /* Add a REG_MAYBE_DEAD note to the insn. */
11416 rs6000_maybe_dead (rtx insn)
11418 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11423 /* Emit instructions needed to load the TOC register.
11424 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11425 a constant pool; or for SVR4 -fpic. */
11428 rs6000_emit_load_toc_table (int fromprolog)
11431 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11433 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11435 rtx temp = (fromprolog
11436 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11437 : gen_reg_rtx (Pmode));
11438 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11440 rs6000_maybe_dead (insn);
11441 insn = emit_move_insn (dest, temp);
11443 rs6000_maybe_dead (insn);
11445 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
11448 rtx tempLR = (fromprolog
11449 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11450 : gen_reg_rtx (Pmode));
11451 rtx temp0 = (fromprolog
11452 ? gen_rtx_REG (Pmode, 0)
11453 : gen_reg_rtx (Pmode));
11455 /* possibly create the toc section */
11456 if (! toc_initialized)
11459 function_section (current_function_decl);
11466 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
11467 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11469 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
11470 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11472 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
11474 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
11475 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
11483 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
11484 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
11485 emit_move_insn (dest, tempLR);
11486 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
11488 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
11490 rs6000_maybe_dead (insn);
11492 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11494 /* This is for AIX code running in non-PIC ELF32. */
11497 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11498 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11500 insn = emit_insn (gen_elf_high (dest, realsym));
11502 rs6000_maybe_dead (insn);
11503 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11505 rs6000_maybe_dead (insn);
11507 else if (DEFAULT_ABI == ABI_AIX)
11510 insn = emit_insn (gen_load_toc_aix_si (dest));
11512 insn = emit_insn (gen_load_toc_aix_di (dest));
11514 rs6000_maybe_dead (insn);
11520 /* Emit instructions to restore the link register after determining where
11521 its value has been stored. */
11524 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11526 rs6000_stack_t *info = rs6000_stack_info ();
11529 operands[0] = source;
11530 operands[1] = scratch;
11532 if (info->lr_save_p)
11534 rtx frame_rtx = stack_pointer_rtx;
11535 HOST_WIDE_INT sp_offset = 0;
11538 if (frame_pointer_needed
11539 || current_function_calls_alloca
11540 || info->total_size > 32767)
11542 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11543 frame_rtx = operands[1];
11545 else if (info->push_p)
11546 sp_offset = info->total_size;
11548 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11549 tmp = gen_rtx_MEM (Pmode, tmp);
11550 emit_move_insn (tmp, operands[0]);
11553 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11556 static GTY(()) int set = -1;
11559 get_TOC_alias_set (void)
11562 set = new_alias_set ();
11566 /* This returns nonzero if the current function uses the TOC. This is
11567 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11568 is generated by the ABI_V4 load_toc_* patterns. */
11575 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11578 rtx pat = PATTERN (insn);
11581 if (GET_CODE (pat) == PARALLEL)
11582 for (i = 0; i < XVECLEN (pat, 0); i++)
11584 rtx sub = XVECEXP (pat, 0, i);
11585 if (GET_CODE (sub) == USE)
11587 sub = XEXP (sub, 0);
11588 if (GET_CODE (sub) == UNSPEC
11589 && XINT (sub, 1) == UNSPEC_TOC)
11599 create_TOC_reference (rtx symbol)
11601 if (no_new_pseudos)
11602 regs_ever_live[TOC_REGISTER] = 1;
11603 return gen_rtx_PLUS (Pmode,
11604 gen_rtx_REG (Pmode, TOC_REGISTER),
11605 gen_rtx_CONST (Pmode,
11606 gen_rtx_MINUS (Pmode, symbol,
11607 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11610 /* If _Unwind_* has been called from within the same module,
11611 toc register is not guaranteed to be saved to 40(1) on function
11612 entry. Save it there in that case. */
11615 rs6000_aix_emit_builtin_unwind_init (void)
11618 rtx stack_top = gen_reg_rtx (Pmode);
11619 rtx opcode_addr = gen_reg_rtx (Pmode);
11620 rtx opcode = gen_reg_rtx (SImode);
11621 rtx tocompare = gen_reg_rtx (SImode);
11622 rtx no_toc_save_needed = gen_label_rtx ();
11624 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11625 emit_move_insn (stack_top, mem);
11627 mem = gen_rtx_MEM (Pmode,
11628 gen_rtx_PLUS (Pmode, stack_top,
11629 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11630 emit_move_insn (opcode_addr, mem);
11631 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11632 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11633 : 0xE8410028, SImode));
11635 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11636 SImode, NULL_RTX, NULL_RTX,
11637 no_toc_save_needed);
11639 mem = gen_rtx_MEM (Pmode,
11640 gen_rtx_PLUS (Pmode, stack_top,
11641 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11642 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11643 emit_label (no_toc_save_needed);
11646 /* This ties together stack memory (MEM with an alias set of
11647 rs6000_sr_alias_set) and the change to the stack pointer. */
11650 rs6000_emit_stack_tie (void)
11652 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11654 set_mem_alias_set (mem, rs6000_sr_alias_set);
11655 emit_insn (gen_stack_tie (mem));
11658 /* Emit the correct code for allocating stack space, as insns.
11659 If COPY_R12, make sure a copy of the old frame is left in r12.
11660 The generated code may use hard register 0 as a temporary. */
11663 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11666 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11667 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11668 rtx todec = GEN_INT (-size);
11670 if (current_function_limit_stack)
11672 if (REG_P (stack_limit_rtx)
11673 && REGNO (stack_limit_rtx) > 1
11674 && REGNO (stack_limit_rtx) <= 31)
11676 emit_insn (TARGET_32BIT
11677 ? gen_addsi3 (tmp_reg,
11680 : gen_adddi3 (tmp_reg,
11684 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11687 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11689 && DEFAULT_ABI == ABI_V4)
11691 rtx toload = gen_rtx_CONST (VOIDmode,
11692 gen_rtx_PLUS (Pmode,
11696 emit_insn (gen_elf_high (tmp_reg, toload));
11697 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11698 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11702 warning ("stack limit expression is not supported");
11705 if (copy_r12 || ! TARGET_UPDATE)
11706 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11712 /* Need a note here so that try_split doesn't get confused. */
11713 if (get_last_insn() == NULL_RTX)
11714 emit_note (NOTE_INSN_DELETED);
11715 insn = emit_move_insn (tmp_reg, todec);
11716 try_split (PATTERN (insn), insn, 0);
11720 insn = emit_insn (TARGET_32BIT
11721 ? gen_movsi_update (stack_reg, stack_reg,
11723 : gen_movdi_update (stack_reg, stack_reg,
11724 todec, stack_reg));
11728 insn = emit_insn (TARGET_32BIT
11729 ? gen_addsi3 (stack_reg, stack_reg, todec)
11730 : gen_adddi3 (stack_reg, stack_reg, todec));
11731 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11732 gen_rtx_REG (Pmode, 12));
11735 RTX_FRAME_RELATED_P (insn) = 1;
11737 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11738 gen_rtx_SET (VOIDmode, stack_reg,
11739 gen_rtx_PLUS (Pmode, stack_reg,
11744 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11745 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11746 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11747 deduce these equivalences by itself so it wasn't necessary to hold
11748 its hand so much. */
11751 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11752 rtx reg2, rtx rreg)
11756 /* copy_rtx will not make unique copies of registers, so we need to
11757 ensure we don't have unwanted sharing here. */
11759 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11762 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11764 real = copy_rtx (PATTERN (insn));
11766 if (reg2 != NULL_RTX)
11767 real = replace_rtx (real, reg2, rreg);
11769 real = replace_rtx (real, reg,
11770 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11771 STACK_POINTER_REGNUM),
11774 /* We expect that 'real' is either a SET or a PARALLEL containing
11775 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11776 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11778 if (GET_CODE (real) == SET)
11782 temp = simplify_rtx (SET_SRC (set));
11784 SET_SRC (set) = temp;
11785 temp = simplify_rtx (SET_DEST (set));
11787 SET_DEST (set) = temp;
11788 if (GET_CODE (SET_DEST (set)) == MEM)
11790 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11792 XEXP (SET_DEST (set), 0) = temp;
11795 else if (GET_CODE (real) == PARALLEL)
11798 for (i = 0; i < XVECLEN (real, 0); i++)
11799 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11801 rtx set = XVECEXP (real, 0, i);
11803 temp = simplify_rtx (SET_SRC (set));
11805 SET_SRC (set) = temp;
11806 temp = simplify_rtx (SET_DEST (set));
11808 SET_DEST (set) = temp;
11809 if (GET_CODE (SET_DEST (set)) == MEM)
11811 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11813 XEXP (SET_DEST (set), 0) = temp;
11815 RTX_FRAME_RELATED_P (set) = 1;
11822 real = spe_synthesize_frame_save (real);
11824 RTX_FRAME_RELATED_P (insn) = 1;
11825 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11830 /* Given an SPE frame note, return a PARALLEL of SETs with the
11831 original note, plus a synthetic register save. */
11834 spe_synthesize_frame_save (rtx real)
11836 rtx synth, offset, reg, real2;
11838 if (GET_CODE (real) != SET
11839 || GET_MODE (SET_SRC (real)) != V2SImode)
11842 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11843 frame related note. The parallel contains a set of the register
11844 being saved, and another set to a synthetic register (n+1200).
11845 This is so we can differentiate between 64-bit and 32-bit saves.
11846 Words cannot describe this nastiness. */
11848 if (GET_CODE (SET_DEST (real)) != MEM
11849 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11850 || GET_CODE (SET_SRC (real)) != REG)
11854 (set (mem (plus (reg x) (const y)))
11857 (set (mem (plus (reg x) (const y+4)))
11861 real2 = copy_rtx (real);
11862 PUT_MODE (SET_DEST (real2), SImode);
11863 reg = SET_SRC (real2);
11864 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11865 synth = copy_rtx (real2);
11867 if (BYTES_BIG_ENDIAN)
11869 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11870 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11873 reg = SET_SRC (synth);
11875 synth = replace_rtx (synth, reg,
11876 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11878 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11879 synth = replace_rtx (synth, offset,
11880 GEN_INT (INTVAL (offset)
11881 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11883 RTX_FRAME_RELATED_P (synth) = 1;
11884 RTX_FRAME_RELATED_P (real2) = 1;
11885 if (BYTES_BIG_ENDIAN)
11886 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11888 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11893 /* Returns an insn that has a vrsave set operation with the
11894 appropriate CLOBBERs. */
11897 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11900 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11901 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11904 = gen_rtx_SET (VOIDmode,
11906 gen_rtx_UNSPEC_VOLATILE (SImode,
11907 gen_rtvec (2, reg, vrsave),
11912 /* We need to clobber the registers in the mask so the scheduler
11913 does not move sets to VRSAVE before sets of AltiVec registers.
11915 However, if the function receives nonlocal gotos, reload will set
11916 all call saved registers live. We will end up with:
11918 (set (reg 999) (mem))
11919 (parallel [ (set (reg vrsave) (unspec blah))
11920 (clobber (reg 999))])
11922 The clobber will cause the store into reg 999 to be dead, and
11923 flow will attempt to delete an epilogue insn. In this case, we
11924 need an unspec use/set of the register. */
11926 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11927 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11929 if (!epiloguep || call_used_regs [i])
11930 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11931 gen_rtx_REG (V4SImode, i));
11934 rtx reg = gen_rtx_REG (V4SImode, i);
11937 = gen_rtx_SET (VOIDmode,
11939 gen_rtx_UNSPEC (V4SImode,
11940 gen_rtvec (1, reg), 27));
11944 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11946 for (i = 0; i < nclobs; ++i)
11947 XVECEXP (insn, 0, i) = clobs[i];
11952 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11953 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11956 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11957 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11959 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11960 rtx replacea, replaceb;
11962 int_rtx = GEN_INT (offset);
11964 /* Some cases that need register indexed addressing. */
11965 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11967 && SPE_VECTOR_MODE (mode)
11968 && !SPE_CONST_OFFSET_OK (offset)))
11970 /* Whomever calls us must make sure r11 is available in the
11971 flow path of instructions in the prologue. */
11972 offset_rtx = gen_rtx_REG (Pmode, 11);
11973 emit_move_insn (offset_rtx, int_rtx);
11975 replacea = offset_rtx;
11976 replaceb = int_rtx;
11980 offset_rtx = int_rtx;
11981 replacea = NULL_RTX;
11982 replaceb = NULL_RTX;
11985 reg = gen_rtx_REG (mode, regno);
11986 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11987 mem = gen_rtx_MEM (mode, addr);
11988 set_mem_alias_set (mem, rs6000_sr_alias_set);
11990 insn = emit_move_insn (mem, reg);
11992 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11995 /* Emit an offset memory reference suitable for a frame store, while
11996 converting to a valid addressing mode. */
11999 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
12001 rtx int_rtx, offset_rtx;
12003 int_rtx = GEN_INT (offset);
12005 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
12007 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12008 emit_move_insn (offset_rtx, int_rtx);
12011 offset_rtx = int_rtx;
12013 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12016 /* Emit function prologue as insns. */
12019 rs6000_emit_prologue (void)
12021 rs6000_stack_t *info = rs6000_stack_info ();
12022 enum machine_mode reg_mode = Pmode;
12023 int reg_size = TARGET_32BIT ? 4 : 8;
12024 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12025 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
12026 rtx frame_reg_rtx = sp_reg_rtx;
12027 rtx cr_save_rtx = NULL_RTX;
12029 int saving_FPRs_inline;
12030 int using_store_multiple;
12031 HOST_WIDE_INT sp_offset = 0;
12033 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12035 reg_mode = V2SImode;
12039 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12040 && (!TARGET_SPE_ABI
12041 || info->spe_64bit_regs_used == 0)
12042 && info->first_gp_reg_save < 31);
12043 saving_FPRs_inline = (info->first_fp_reg_save == 64
12044 || FP_SAVE_INLINE (info->first_fp_reg_save)
12045 || current_function_calls_eh_return
12046 || cfun->machine->ra_need_lr);
12048 /* For V.4, update stack before we do any saving and set back pointer. */
12050 && (DEFAULT_ABI == ABI_V4
12051 || current_function_calls_eh_return))
12053 if (info->total_size < 32767)
12054 sp_offset = info->total_size;
12056 frame_reg_rtx = frame_ptr_rtx;
12057 rs6000_emit_allocate_stack (info->total_size,
12058 (frame_reg_rtx != sp_reg_rtx
12059 && (info->cr_save_p
12061 || info->first_fp_reg_save < 64
12062 || info->first_gp_reg_save < 32
12064 if (frame_reg_rtx != sp_reg_rtx)
12065 rs6000_emit_stack_tie ();
12068 /* Save AltiVec registers if needed. */
12069 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12073 /* There should be a non inline version of this, for when we
12074 are saving lots of vector registers. */
12075 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12076 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12078 rtx areg, savereg, mem;
12081 offset = info->altivec_save_offset + sp_offset
12082 + 16 * (i - info->first_altivec_reg_save);
12084 savereg = gen_rtx_REG (V4SImode, i);
12086 areg = gen_rtx_REG (Pmode, 0);
12087 emit_move_insn (areg, GEN_INT (offset));
12089 /* AltiVec addressing mode is [reg+reg]. */
12090 mem = gen_rtx_MEM (V4SImode,
12091 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
12093 set_mem_alias_set (mem, rs6000_sr_alias_set);
12095 insn = emit_move_insn (mem, savereg);
12097 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12098 areg, GEN_INT (offset));
12102 /* VRSAVE is a bit vector representing which AltiVec registers
12103 are used. The OS uses this to determine which vector
12104 registers to save on a context switch. We need to save
12105 VRSAVE on the stack frame, add whatever AltiVec registers we
12106 used in this function, and do the corresponding magic in the
12109 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12110 && info->vrsave_mask != 0)
12112 rtx reg, mem, vrsave;
12115 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
12116 as frame_reg_rtx and r11 as the static chain pointer for
12117 nested functions. */
12118 reg = gen_rtx_REG (SImode, 0);
12119 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12121 emit_insn (gen_get_vrsave_internal (reg));
12123 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
12126 offset = info->vrsave_save_offset + sp_offset;
12128 = gen_rtx_MEM (SImode,
12129 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
12130 set_mem_alias_set (mem, rs6000_sr_alias_set);
12131 insn = emit_move_insn (mem, reg);
12133 /* Include the registers in the mask. */
12134 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
12136 insn = emit_insn (generate_set_vrsave (reg, info, 0));
12139 /* If we use the link register, get it into r0. */
12140 if (info->lr_save_p)
12141 emit_move_insn (gen_rtx_REG (Pmode, 0),
12142 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12144 /* If we need to save CR, put it into r12. */
12145 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
12147 cr_save_rtx = gen_rtx_REG (SImode, 12);
12148 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12151 /* Do any required saving of fpr's. If only one or two to save, do
12152 it ourselves. Otherwise, call function. */
12153 if (saving_FPRs_inline)
12156 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12157 if ((regs_ever_live[info->first_fp_reg_save+i]
12158 && ! call_used_regs[info->first_fp_reg_save+i]))
12159 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
12160 info->first_fp_reg_save + i,
12161 info->fp_save_offset + sp_offset + 8 * i,
12164 else if (info->first_fp_reg_save != 64)
12168 const char *alloc_rname;
12170 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
12172 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
12173 gen_rtx_REG (Pmode,
12174 LINK_REGISTER_REGNUM));
12175 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
12176 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
12177 alloc_rname = ggc_strdup (rname);
12178 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12179 gen_rtx_SYMBOL_REF (Pmode,
12181 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12183 rtx addr, reg, mem;
12184 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
12185 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12186 GEN_INT (info->fp_save_offset
12187 + sp_offset + 8*i));
12188 mem = gen_rtx_MEM (DFmode, addr);
12189 set_mem_alias_set (mem, rs6000_sr_alias_set);
12191 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
12193 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12194 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12195 NULL_RTX, NULL_RTX);
12198 /* Save GPRs. This is done as a PARALLEL if we are using
12199 the store-multiple instructions. */
12200 if (using_store_multiple)
12204 p = rtvec_alloc (32 - info->first_gp_reg_save);
12205 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12207 rtx addr, reg, mem;
12208 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12209 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12210 GEN_INT (info->gp_save_offset
12213 mem = gen_rtx_MEM (reg_mode, addr);
12214 set_mem_alias_set (mem, rs6000_sr_alias_set);
12216 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
12218 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12219 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12220 NULL_RTX, NULL_RTX);
12225 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12226 if ((regs_ever_live[info->first_gp_reg_save+i]
12227 && (! call_used_regs[info->first_gp_reg_save+i]
12228 || (i+info->first_gp_reg_save
12229 == RS6000_PIC_OFFSET_TABLE_REGNUM
12230 && TARGET_TOC && TARGET_MINIMAL_TOC)))
12231 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12232 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12233 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12235 rtx addr, reg, mem;
12236 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12238 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12240 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12243 if (!SPE_CONST_OFFSET_OK (offset))
12245 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12246 emit_move_insn (b, GEN_INT (offset));
12249 b = GEN_INT (offset);
12251 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12252 mem = gen_rtx_MEM (V2SImode, addr);
12253 set_mem_alias_set (mem, rs6000_sr_alias_set);
12254 insn = emit_move_insn (mem, reg);
12256 if (GET_CODE (b) == CONST_INT)
12257 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12258 NULL_RTX, NULL_RTX);
12260 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12261 b, GEN_INT (offset));
12265 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12266 GEN_INT (info->gp_save_offset
12269 mem = gen_rtx_MEM (reg_mode, addr);
12270 set_mem_alias_set (mem, rs6000_sr_alias_set);
12272 insn = emit_move_insn (mem, reg);
12273 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12274 NULL_RTX, NULL_RTX);
12279 /* ??? There's no need to emit actual instructions here, but it's the
12280 easiest way to get the frame unwind information emitted. */
12281 if (current_function_calls_eh_return)
12283 unsigned int i, regno;
12285 /* In AIX ABI we need to pretend we save r2 here. */
12288 rtx addr, reg, mem;
12290 reg = gen_rtx_REG (reg_mode, 2);
12291 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12292 GEN_INT (sp_offset + 5 * reg_size));
12293 mem = gen_rtx_MEM (reg_mode, addr);
12294 set_mem_alias_set (mem, rs6000_sr_alias_set);
12296 insn = emit_move_insn (mem, reg);
12297 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12298 NULL_RTX, NULL_RTX);
12299 PATTERN (insn) = gen_blockage ();
12304 regno = EH_RETURN_DATA_REGNO (i);
12305 if (regno == INVALID_REGNUM)
12308 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
12309 info->ehrd_offset + sp_offset
12310 + reg_size * (int) i,
12315 /* Save lr if we used it. */
12316 if (info->lr_save_p)
12318 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12319 GEN_INT (info->lr_save_offset + sp_offset));
12320 rtx reg = gen_rtx_REG (Pmode, 0);
12321 rtx mem = gen_rtx_MEM (Pmode, addr);
12322 /* This should not be of rs6000_sr_alias_set, because of
12323 __builtin_return_address. */
12325 insn = emit_move_insn (mem, reg);
12326 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12327 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12330 /* Save CR if we use any that must be preserved. */
12331 if (info->cr_save_p)
12333 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12334 GEN_INT (info->cr_save_offset + sp_offset));
12335 rtx mem = gen_rtx_MEM (SImode, addr);
12337 set_mem_alias_set (mem, rs6000_sr_alias_set);
12339 /* If r12 was used to hold the original sp, copy cr into r0 now
12341 if (REGNO (frame_reg_rtx) == 12)
12343 cr_save_rtx = gen_rtx_REG (SImode, 0);
12344 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12346 insn = emit_move_insn (mem, cr_save_rtx);
12348 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12349 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12350 But that's OK. All we have to do is specify that _one_ condition
12351 code register is saved in this stack slot. The thrower's epilogue
12352 will then restore all the call-saved registers.
12353 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12354 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12355 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
12358 /* Update stack and set back pointer unless this is V.4,
12359 for which it was done previously. */
12361 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
12362 rs6000_emit_allocate_stack (info->total_size, FALSE);
12364 /* Set frame pointer, if needed. */
12365 if (frame_pointer_needed)
12367 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12369 RTX_FRAME_RELATED_P (insn) = 1;
12372 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12373 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12374 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12375 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12377 /* If emit_load_toc_table will use the link register, we need to save
12378 it. We use R12 for this purpose because emit_load_toc_table
12379 can use register 0. This allows us to use a plain 'blr' to return
12380 from the procedure more often. */
12381 int save_LR_around_toc_setup = (TARGET_ELF
12382 && DEFAULT_ABI != ABI_AIX
12384 && ! info->lr_save_p
12385 && EXIT_BLOCK_PTR->pred != NULL);
12386 if (save_LR_around_toc_setup)
12388 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12389 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
12390 rs6000_emit_load_toc_table (TRUE);
12391 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
12394 rs6000_emit_load_toc_table (TRUE);
12398 if (DEFAULT_ABI == ABI_DARWIN
12399 && flag_pic && current_function_uses_pic_offset_table)
12401 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12402 const char *picbase = machopic_function_base_name ();
12403 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
12405 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
12407 rs6000_maybe_dead (
12408 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
12409 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
12414 /* Write function prologue. */
12417 rs6000_output_function_prologue (FILE *file,
12418 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12420 rs6000_stack_t *info = rs6000_stack_info ();
12422 if (TARGET_DEBUG_STACK)
12423 debug_stack_info (info);
12425 /* Write .extern for any function we will call to save and restore
12427 if (info->first_fp_reg_save < 64
12428 && !FP_SAVE_INLINE (info->first_fp_reg_save))
12429 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12430 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
12431 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
12432 RESTORE_FP_SUFFIX);
12434 /* Write .extern for AIX common mode routines, if needed. */
12435 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
12437 fputs ("\t.extern __mulh\n", file);
12438 fputs ("\t.extern __mull\n", file);
12439 fputs ("\t.extern __divss\n", file);
12440 fputs ("\t.extern __divus\n", file);
12441 fputs ("\t.extern __quoss\n", file);
12442 fputs ("\t.extern __quous\n", file);
12443 common_mode_defined = 1;
12446 if (! HAVE_prologue)
12450 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12451 the "toplevel" insn chain. */
12452 emit_note (NOTE_INSN_DELETED);
12453 rs6000_emit_prologue ();
12454 emit_note (NOTE_INSN_DELETED);
12456 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12460 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12462 INSN_ADDRESSES_NEW (insn, addr);
12467 if (TARGET_DEBUG_STACK)
12468 debug_rtx_list (get_insns (), 100);
12469 final (get_insns (), file, FALSE, FALSE);
12473 rs6000_pic_labelno++;
12476 /* Emit function epilogue as insns.
12478 At present, dwarf2out_frame_debug_expr doesn't understand
12479 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12480 anywhere in the epilogue. Most of the insns below would in any case
12481 need special notes to explain where r11 is in relation to the stack. */
12484 rs6000_emit_epilogue (int sibcall)
12486 rs6000_stack_t *info;
12487 int restoring_FPRs_inline;
12488 int using_load_multiple;
12489 int using_mfcr_multiple;
12490 int use_backchain_to_restore_sp;
12492 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
12493 rtx frame_reg_rtx = sp_reg_rtx;
12494 enum machine_mode reg_mode = Pmode;
12495 int reg_size = TARGET_32BIT ? 4 : 8;
12498 info = rs6000_stack_info ();
12500 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12502 reg_mode = V2SImode;
12506 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12507 && (!TARGET_SPE_ABI
12508 || info->spe_64bit_regs_used == 0)
12509 && info->first_gp_reg_save < 31);
12510 restoring_FPRs_inline = (sibcall
12511 || current_function_calls_eh_return
12512 || info->first_fp_reg_save == 64
12513 || FP_SAVE_INLINE (info->first_fp_reg_save));
12514 use_backchain_to_restore_sp = (frame_pointer_needed
12515 || current_function_calls_alloca
12516 || info->total_size > 32767);
12517 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12518 || rs6000_cpu == PROCESSOR_PPC603
12519 || rs6000_cpu == PROCESSOR_PPC750
12522 /* If we have a frame pointer, a call to alloca, or a large stack
12523 frame, restore the old stack pointer using the backchain. Otherwise,
12524 we know what size to update it with. */
12525 if (use_backchain_to_restore_sp)
12527 /* Under V.4, don't reset the stack pointer until after we're done
12528 loading the saved registers. */
12529 if (DEFAULT_ABI == ABI_V4)
12530 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12532 emit_move_insn (frame_reg_rtx,
12533 gen_rtx_MEM (Pmode, sp_reg_rtx));
12536 else if (info->push_p)
12538 if (DEFAULT_ABI == ABI_V4
12539 || current_function_calls_eh_return)
12540 sp_offset = info->total_size;
12543 emit_insn (TARGET_32BIT
12544 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12545 GEN_INT (info->total_size))
12546 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12547 GEN_INT (info->total_size)));
12551 /* Restore AltiVec registers if needed. */
12552 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12556 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12557 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12559 rtx addr, areg, mem;
12561 areg = gen_rtx_REG (Pmode, 0);
12563 (areg, GEN_INT (info->altivec_save_offset
12565 + 16 * (i - info->first_altivec_reg_save)));
12567 /* AltiVec addressing mode is [reg+reg]. */
12568 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12569 mem = gen_rtx_MEM (V4SImode, addr);
12570 set_mem_alias_set (mem, rs6000_sr_alias_set);
12572 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12576 /* Restore VRSAVE if needed. */
12577 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12578 && info->vrsave_mask != 0)
12580 rtx addr, mem, reg;
12582 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12583 GEN_INT (info->vrsave_save_offset + sp_offset));
12584 mem = gen_rtx_MEM (SImode, addr);
12585 set_mem_alias_set (mem, rs6000_sr_alias_set);
12586 reg = gen_rtx_REG (SImode, 12);
12587 emit_move_insn (reg, mem);
12589 emit_insn (generate_set_vrsave (reg, info, 1));
12592 /* Get the old lr if we saved it. */
12593 if (info->lr_save_p)
12595 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12596 info->lr_save_offset + sp_offset);
12598 set_mem_alias_set (mem, rs6000_sr_alias_set);
12600 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12603 /* Get the old cr if we saved it. */
12604 if (info->cr_save_p)
12606 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12607 GEN_INT (info->cr_save_offset + sp_offset));
12608 rtx mem = gen_rtx_MEM (SImode, addr);
12610 set_mem_alias_set (mem, rs6000_sr_alias_set);
12612 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12615 /* Set LR here to try to overlap restores below. */
12616 if (info->lr_save_p)
12617 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12618 gen_rtx_REG (Pmode, 0));
12620 /* Load exception handler data registers, if needed. */
12621 if (current_function_calls_eh_return)
12623 unsigned int i, regno;
12627 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12628 GEN_INT (sp_offset + 5 * reg_size));
12629 rtx mem = gen_rtx_MEM (reg_mode, addr);
12631 set_mem_alias_set (mem, rs6000_sr_alias_set);
12633 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12640 regno = EH_RETURN_DATA_REGNO (i);
12641 if (regno == INVALID_REGNUM)
12644 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12645 info->ehrd_offset + sp_offset
12646 + reg_size * (int) i);
12647 set_mem_alias_set (mem, rs6000_sr_alias_set);
12649 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12653 /* Restore GPRs. This is done as a PARALLEL if we are using
12654 the load-multiple instructions. */
12655 if (using_load_multiple)
12658 p = rtvec_alloc (32 - info->first_gp_reg_save);
12659 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12661 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12662 GEN_INT (info->gp_save_offset
12665 rtx mem = gen_rtx_MEM (reg_mode, addr);
12667 set_mem_alias_set (mem, rs6000_sr_alias_set);
12670 gen_rtx_SET (VOIDmode,
12671 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12674 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12677 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12678 if ((regs_ever_live[info->first_gp_reg_save+i]
12679 && (! call_used_regs[info->first_gp_reg_save+i]
12680 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12681 && TARGET_TOC && TARGET_MINIMAL_TOC)))
12682 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12683 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12684 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12686 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12687 GEN_INT (info->gp_save_offset
12690 rtx mem = gen_rtx_MEM (reg_mode, addr);
12692 /* Restore 64-bit quantities for SPE. */
12693 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12695 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12698 if (!SPE_CONST_OFFSET_OK (offset))
12700 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12701 emit_move_insn (b, GEN_INT (offset));
12704 b = GEN_INT (offset);
12706 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12707 mem = gen_rtx_MEM (V2SImode, addr);
12710 set_mem_alias_set (mem, rs6000_sr_alias_set);
12712 emit_move_insn (gen_rtx_REG (reg_mode,
12713 info->first_gp_reg_save + i), mem);
12716 /* Restore fpr's if we need to do it without calling a function. */
12717 if (restoring_FPRs_inline)
12718 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12719 if ((regs_ever_live[info->first_fp_reg_save+i]
12720 && ! call_used_regs[info->first_fp_reg_save+i]))
12723 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12724 GEN_INT (info->fp_save_offset
12727 mem = gen_rtx_MEM (DFmode, addr);
12728 set_mem_alias_set (mem, rs6000_sr_alias_set);
12730 emit_move_insn (gen_rtx_REG (DFmode,
12731 info->first_fp_reg_save + i),
12735 /* If we saved cr, restore it here. Just those that were used. */
12736 if (info->cr_save_p)
12738 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12741 if (using_mfcr_multiple)
12743 for (i = 0; i < 8; i++)
12744 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12750 if (using_mfcr_multiple && count > 1)
12755 p = rtvec_alloc (count);
12758 for (i = 0; i < 8; i++)
12759 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12761 rtvec r = rtvec_alloc (2);
12762 RTVEC_ELT (r, 0) = r12_rtx;
12763 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12764 RTVEC_ELT (p, ndx) =
12765 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12766 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12769 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12774 for (i = 0; i < 8; i++)
12775 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12777 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12783 /* If this is V.4, unwind the stack pointer after all of the loads
12784 have been done. We need to emit a block here so that sched
12785 doesn't decide to move the sp change before the register restores
12786 (which may not have any obvious dependency on the stack). This
12787 doesn't hurt performance, because there is no scheduling that can
12788 be done after this point. */
12789 if (DEFAULT_ABI == ABI_V4
12790 || current_function_calls_eh_return)
12792 if (frame_reg_rtx != sp_reg_rtx)
12793 rs6000_emit_stack_tie ();
12795 if (use_backchain_to_restore_sp)
12797 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12799 else if (sp_offset != 0)
12801 emit_insn (TARGET_32BIT
12802 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12803 GEN_INT (sp_offset))
12804 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12805 GEN_INT (sp_offset)));
12809 if (current_function_calls_eh_return)
12811 rtx sa = EH_RETURN_STACKADJ_RTX;
12812 emit_insn (TARGET_32BIT
12813 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12814 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12820 if (! restoring_FPRs_inline)
12821 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12823 p = rtvec_alloc (2);
12825 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12826 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12827 gen_rtx_REG (Pmode,
12828 LINK_REGISTER_REGNUM));
12830 /* If we have to restore more than two FP registers, branch to the
12831 restore function. It will return to our caller. */
12832 if (! restoring_FPRs_inline)
12836 const char *alloc_rname;
12838 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12839 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12840 alloc_rname = ggc_strdup (rname);
12841 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12842 gen_rtx_SYMBOL_REF (Pmode,
12845 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12848 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12849 GEN_INT (info->fp_save_offset + 8*i));
12850 mem = gen_rtx_MEM (DFmode, addr);
12851 set_mem_alias_set (mem, rs6000_sr_alias_set);
12853 RTVEC_ELT (p, i+3) =
12854 gen_rtx_SET (VOIDmode,
12855 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12860 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12864 /* Write function epilogue. */
12867 rs6000_output_function_epilogue (FILE *file,
12868 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12870 rs6000_stack_t *info = rs6000_stack_info ();
12872 if (! HAVE_epilogue)
12874 rtx insn = get_last_insn ();
12875 /* If the last insn was a BARRIER, we don't have to write anything except
12876 the trace table. */
12877 if (GET_CODE (insn) == NOTE)
12878 insn = prev_nonnote_insn (insn);
12879 if (insn == 0 || GET_CODE (insn) != BARRIER)
12881 /* This is slightly ugly, but at least we don't have two
12882 copies of the epilogue-emitting code. */
12885 /* A NOTE_INSN_DELETED is supposed to be at the start
12886 and end of the "toplevel" insn chain. */
12887 emit_note (NOTE_INSN_DELETED);
12888 rs6000_emit_epilogue (FALSE);
12889 emit_note (NOTE_INSN_DELETED);
12891 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12895 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12897 INSN_ADDRESSES_NEW (insn, addr);
12902 if (TARGET_DEBUG_STACK)
12903 debug_rtx_list (get_insns (), 100);
12904 final (get_insns (), file, FALSE, FALSE);
12910 macho_branch_islands ();
12911 /* Mach-O doesn't support labels at the end of objects, so if
12912 it looks like we might want one, insert a NOP. */
12914 rtx insn = get_last_insn ();
12917 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12918 insn = PREV_INSN (insn);
12922 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12923 fputs ("\tnop\n", file);
12927 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12930 We don't output a traceback table if -finhibit-size-directive was
12931 used. The documentation for -finhibit-size-directive reads
12932 ``don't output a @code{.size} assembler directive, or anything
12933 else that would cause trouble if the function is split in the
12934 middle, and the two halves are placed at locations far apart in
12935 memory.'' The traceback table has this property, since it
12936 includes the offset from the start of the function to the
12937 traceback table itself.
12939 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12940 different traceback table. */
12941 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12942 && rs6000_traceback != traceback_none)
12944 const char *fname = NULL;
12945 const char *language_string = lang_hooks.name;
12946 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12948 int optional_tbtab;
12950 if (rs6000_traceback == traceback_full)
12951 optional_tbtab = 1;
12952 else if (rs6000_traceback == traceback_part)
12953 optional_tbtab = 0;
12955 optional_tbtab = !optimize_size && !TARGET_ELF;
12957 if (optional_tbtab)
12959 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12960 while (*fname == '.') /* V.4 encodes . in the name */
12963 /* Need label immediately before tbtab, so we can compute
12964 its offset from the function start. */
12965 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12966 ASM_OUTPUT_LABEL (file, fname);
12969 /* The .tbtab pseudo-op can only be used for the first eight
12970 expressions, since it can't handle the possibly variable
12971 length fields that follow. However, if you omit the optional
12972 fields, the assembler outputs zeros for all optional fields
12973 anyways, giving each variable length field is minimum length
12974 (as defined in sys/debug.h). Thus we can not use the .tbtab
12975 pseudo-op at all. */
12977 /* An all-zero word flags the start of the tbtab, for debuggers
12978 that have to find it by searching forward from the entry
12979 point or from the current pc. */
12980 fputs ("\t.long 0\n", file);
12982 /* Tbtab format type. Use format type 0. */
12983 fputs ("\t.byte 0,", file);
12985 /* Language type. Unfortunately, there does not seem to be any
12986 official way to discover the language being compiled, so we
12987 use language_string.
12988 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
12989 Java is 13. Objective-C is 14. */
12990 if (! strcmp (language_string, "GNU C"))
12992 else if (! strcmp (language_string, "GNU F77"))
12994 else if (! strcmp (language_string, "GNU Pascal"))
12996 else if (! strcmp (language_string, "GNU Ada"))
12998 else if (! strcmp (language_string, "GNU C++"))
13000 else if (! strcmp (language_string, "GNU Java"))
13002 else if (! strcmp (language_string, "GNU Objective-C"))
13006 fprintf (file, "%d,", i);
13008 /* 8 single bit fields: global linkage (not set for C extern linkage,
13009 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
13010 from start of procedure stored in tbtab, internal function, function
13011 has controlled storage, function has no toc, function uses fp,
13012 function logs/aborts fp operations. */
13013 /* Assume that fp operations are used if any fp reg must be saved. */
13014 fprintf (file, "%d,",
13015 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
13017 /* 6 bitfields: function is interrupt handler, name present in
13018 proc table, function calls alloca, on condition directives
13019 (controls stack walks, 3 bits), saves condition reg, saves
13021 /* The `function calls alloca' bit seems to be set whenever reg 31 is
13022 set up as a frame pointer, even when there is no alloca call. */
13023 fprintf (file, "%d,",
13024 ((optional_tbtab << 6)
13025 | ((optional_tbtab & frame_pointer_needed) << 5)
13026 | (info->cr_save_p << 1)
13027 | (info->lr_save_p)));
13029 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
13031 fprintf (file, "%d,",
13032 (info->push_p << 7) | (64 - info->first_fp_reg_save));
13034 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
13035 fprintf (file, "%d,", (32 - first_reg_to_save ()));
13037 if (optional_tbtab)
13039 /* Compute the parameter info from the function decl argument
13042 int next_parm_info_bit = 31;
13044 for (decl = DECL_ARGUMENTS (current_function_decl);
13045 decl; decl = TREE_CHAIN (decl))
13047 rtx parameter = DECL_INCOMING_RTL (decl);
13048 enum machine_mode mode = GET_MODE (parameter);
13050 if (GET_CODE (parameter) == REG)
13052 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
13058 if (mode == SFmode)
13060 else if (mode == DFmode || mode == TFmode)
13065 /* If only one bit will fit, don't or in this entry. */
13066 if (next_parm_info_bit > 0)
13067 parm_info |= (bits << (next_parm_info_bit - 1));
13068 next_parm_info_bit -= 2;
13072 fixed_parms += ((GET_MODE_SIZE (mode)
13073 + (UNITS_PER_WORD - 1))
13075 next_parm_info_bit -= 1;
13081 /* Number of fixed point parameters. */
13082 /* This is actually the number of words of fixed point parameters; thus
13083 an 8 byte struct counts as 2; and thus the maximum value is 8. */
13084 fprintf (file, "%d,", fixed_parms);
13086 /* 2 bitfields: number of floating point parameters (7 bits), parameters
13088 /* This is actually the number of fp registers that hold parameters;
13089 and thus the maximum value is 13. */
13090 /* Set parameters on stack bit if parameters are not in their original
13091 registers, regardless of whether they are on the stack? Xlc
13092 seems to set the bit when not optimizing. */
13093 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
13095 if (! optional_tbtab)
13098 /* Optional fields follow. Some are variable length. */
13100 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
13101 11 double float. */
13102 /* There is an entry for each parameter in a register, in the order that
13103 they occur in the parameter list. Any intervening arguments on the
13104 stack are ignored. If the list overflows a long (max possible length
13105 34 bits) then completely leave off all elements that don't fit. */
13106 /* Only emit this long if there was at least one parameter. */
13107 if (fixed_parms || float_parms)
13108 fprintf (file, "\t.long %d\n", parm_info);
13110 /* Offset from start of code to tb table. */
13111 fputs ("\t.long ", file);
13112 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13114 RS6000_OUTPUT_BASENAME (file, fname);
13116 assemble_name (file, fname);
13118 fputs ("-.", file);
13120 RS6000_OUTPUT_BASENAME (file, fname);
13122 assemble_name (file, fname);
13126 /* Interrupt handler mask. */
13127 /* Omit this long, since we never set the interrupt handler bit
13130 /* Number of CTL (controlled storage) anchors. */
13131 /* Omit this long, since the has_ctl bit is never set above. */
13133 /* Displacement into stack of each CTL anchor. */
13134 /* Omit this list of longs, because there are no CTL anchors. */
13136 /* Length of function name. */
13139 fprintf (file, "\t.short %d\n", (int) strlen (fname));
13141 /* Function name. */
13142 assemble_string (fname, strlen (fname));
13144 /* Register for alloca automatic storage; this is always reg 31.
13145 Only emit this if the alloca bit was set above. */
13146 if (frame_pointer_needed)
13147 fputs ("\t.byte 31\n", file);
13149 fputs ("\t.align 2\n", file);
13153 /* A C compound statement that outputs the assembler code for a thunk
13154 function, used to implement C++ virtual function calls with
13155 multiple inheritance. The thunk acts as a wrapper around a virtual
13156 function, adjusting the implicit object parameter before handing
13157 control off to the real function.
13159 First, emit code to add the integer DELTA to the location that
13160 contains the incoming first argument. Assume that this argument
13161 contains a pointer, and is the one used to pass the `this' pointer
13162 in C++. This is the incoming argument *before* the function
13163 prologue, e.g. `%o0' on a sparc. The addition must preserve the
13164 values of all other incoming arguments.
13166 After the addition, emit code to jump to FUNCTION, which is a
13167 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
13168 not touch the return address. Hence returning from FUNCTION will
13169 return to whoever called the current `thunk'.
13171 The effect must be as if FUNCTION had been called directly with the
13172 adjusted first argument. This macro is responsible for emitting
13173 all of the code for a thunk function; output_function_prologue()
13174 and output_function_epilogue() are not invoked.
13176 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
13177 been extracted from it.) It might possibly be useful on some
13178 targets, but probably not.
13180 If you do not define this macro, the target-independent code in the
13181 C++ frontend will generate a less efficient heavyweight thunk that
13182 calls FUNCTION instead of jumping to it. The generic approach does
13183 not support varargs. */
13186 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
13187 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
13190 rtx this, insn, funexp;
13192 reload_completed = 1;
13193 epilogue_completed = 1;
13194 no_new_pseudos = 1;
13196 /* Mark the end of the (empty) prologue. */
13197 emit_note (NOTE_INSN_PROLOGUE_END);
13199 /* Find the "this" pointer. If the function returns a structure,
13200 the structure return pointer is in r3. */
13201 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
13202 this = gen_rtx_REG (Pmode, 4);
13204 this = gen_rtx_REG (Pmode, 3);
13206 /* Apply the constant offset, if required. */
13209 rtx delta_rtx = GEN_INT (delta);
13210 emit_insn (TARGET_32BIT
13211 ? gen_addsi3 (this, this, delta_rtx)
13212 : gen_adddi3 (this, this, delta_rtx));
13215 /* Apply the offset from the vtable, if required. */
13218 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
13219 rtx tmp = gen_rtx_REG (Pmode, 12);
13221 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
13222 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
13224 emit_insn (TARGET_32BIT
13225 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
13226 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
13227 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
13231 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
13233 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
13235 emit_insn (TARGET_32BIT
13236 ? gen_addsi3 (this, this, tmp)
13237 : gen_adddi3 (this, this, tmp));
13240 /* Generate a tail call to the target function. */
13241 if (!TREE_USED (function))
13243 assemble_external (function);
13244 TREE_USED (function) = 1;
13246 funexp = XEXP (DECL_RTL (function), 0);
13247 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
13250 if (MACHOPIC_INDIRECT)
13251 funexp = machopic_indirect_call_target (funexp);
13254 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
13255 generate sibcall RTL explicitly to avoid constraint abort. */
13256 insn = emit_call_insn (
13257 gen_rtx_PARALLEL (VOIDmode,
13259 gen_rtx_CALL (VOIDmode,
13260 funexp, const0_rtx),
13261 gen_rtx_USE (VOIDmode, const0_rtx),
13262 gen_rtx_USE (VOIDmode,
13263 gen_rtx_REG (SImode,
13264 LINK_REGISTER_REGNUM)),
13265 gen_rtx_RETURN (VOIDmode))));
13266 SIBLING_CALL_P (insn) = 1;
13269 /* Run just enough of rest_of_compilation to get the insns emitted.
13270 There's not really enough bulk here to make other passes such as
13271 instruction scheduling worth while. Note that use_thunk calls
13272 assemble_start_function and assemble_end_function. */
13273 insn = get_insns ();
13274 insn_locators_initialize ();
13275 shorten_branches (insn);
13276 final_start_function (insn, file, 1);
13277 final (insn, file, 1, 0);
13278 final_end_function ();
13280 reload_completed = 0;
13281 epilogue_completed = 0;
13282 no_new_pseudos = 0;
13285 /* A quick summary of the various types of 'constant-pool tables'
13288 Target Flags Name One table per
13289 AIX (none) AIX TOC object file
13290 AIX -mfull-toc AIX TOC object file
13291 AIX -mminimal-toc AIX minimal TOC translation unit
13292 SVR4/EABI (none) SVR4 SDATA object file
13293 SVR4/EABI -fpic SVR4 pic object file
13294 SVR4/EABI -fPIC SVR4 PIC translation unit
13295 SVR4/EABI -mrelocatable EABI TOC function
13296 SVR4/EABI -maix AIX TOC object file
13297 SVR4/EABI -maix -mminimal-toc
13298 AIX minimal TOC translation unit
13300 Name Reg. Set by entries contains:
13301 made by addrs? fp? sum?
13303 AIX TOC 2 crt0 as Y option option
13304 AIX minimal TOC 30 prolog gcc Y Y option
13305 SVR4 SDATA 13 crt0 gcc N Y N
13306 SVR4 pic 30 prolog ld Y not yet N
13307 SVR4 PIC 30 prolog gcc Y option option
13308 EABI TOC 30 prolog gcc Y option option
13312 /* Hash functions for the hash table. */
13315 rs6000_hash_constant (rtx k)
13317 enum rtx_code code = GET_CODE (k);
13318 enum machine_mode mode = GET_MODE (k);
13319 unsigned result = (code << 3) ^ mode;
13320 const char *format;
13323 format = GET_RTX_FORMAT (code);
13324 flen = strlen (format);
13330 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
13333 if (mode != VOIDmode)
13334 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
13346 for (; fidx < flen; fidx++)
13347 switch (format[fidx])
13352 const char *str = XSTR (k, fidx);
13353 len = strlen (str);
13354 result = result * 613 + len;
13355 for (i = 0; i < len; i++)
13356 result = result * 613 + (unsigned) str[i];
13361 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
13365 result = result * 613 + (unsigned) XINT (k, fidx);
13368 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13369 result = result * 613 + (unsigned) XWINT (k, fidx);
13373 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13374 result = result * 613 + (unsigned) (XWINT (k, fidx)
13388 toc_hash_function (const void *hash_entry)
13390 const struct toc_hash_struct *thc =
13391 (const struct toc_hash_struct *) hash_entry;
13392 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13395 /* Compare H1 and H2 for equivalence. */
13398 toc_hash_eq (const void *h1, const void *h2)
13400 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13401 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13403 if (((const struct toc_hash_struct *) h1)->key_mode
13404 != ((const struct toc_hash_struct *) h2)->key_mode)
13407 return rtx_equal_p (r1, r2);
13410 /* These are the names given by the C++ front-end to vtables, and
13411 vtable-like objects. Ideally, this logic should not be here;
13412 instead, there should be some programmatic way of inquiring as
13413 to whether or not an object is a vtable. */
13415 #define VTABLE_NAME_P(NAME) \
13416 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
13417 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
13418 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
13419 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
13420 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
13423 rs6000_output_symbol_ref (FILE *file, rtx x)
13425 /* Currently C++ toc references to vtables can be emitted before it
13426 is decided whether the vtable is public or private. If this is
13427 the case, then the linker will eventually complain that there is
13428 a reference to an unknown section. Thus, for vtables only,
13429 we emit the TOC reference to reference the symbol and not the
13431 const char *name = XSTR (x, 0);
13433 if (VTABLE_NAME_P (name))
13435 RS6000_OUTPUT_BASENAME (file, name);
13438 assemble_name (file, name);
13441 /* Output a TOC entry. We derive the entry name from what is being
13445 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
13448 const char *name = buf;
13449 const char *real_name;
13456 /* When the linker won't eliminate them, don't output duplicate
13457 TOC entries (this happens on AIX if there is any kind of TOC,
13458 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
13460 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
13462 struct toc_hash_struct *h;
13465 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
13466 time because GGC is not initialized at that point. */
13467 if (toc_hash_table == NULL)
13468 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
13469 toc_hash_eq, NULL);
13471 h = ggc_alloc (sizeof (*h));
13473 h->key_mode = mode;
13474 h->labelno = labelno;
13476 found = htab_find_slot (toc_hash_table, h, 1);
13477 if (*found == NULL)
13479 else /* This is indeed a duplicate.
13480 Set this label equal to that label. */
13482 fputs ("\t.set ", file);
13483 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13484 fprintf (file, "%d,", labelno);
13485 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13486 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
13492 /* If we're going to put a double constant in the TOC, make sure it's
13493 aligned properly when strict alignment is on. */
13494 if (GET_CODE (x) == CONST_DOUBLE
13495 && STRICT_ALIGNMENT
13496 && GET_MODE_BITSIZE (mode) >= 64
13497 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
13498 ASM_OUTPUT_ALIGN (file, 3);
13501 (*targetm.asm_out.internal_label) (file, "LC", labelno);
13503 /* Handle FP constants specially. Note that if we have a minimal
13504 TOC, things we put here aren't actually in the TOC, so we can allow
13506 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13508 REAL_VALUE_TYPE rv;
13511 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13512 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13516 if (TARGET_MINIMAL_TOC)
13517 fputs (DOUBLE_INT_ASM_OP, file);
13519 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13520 k[0] & 0xffffffff, k[1] & 0xffffffff,
13521 k[2] & 0xffffffff, k[3] & 0xffffffff);
13522 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13523 k[0] & 0xffffffff, k[1] & 0xffffffff,
13524 k[2] & 0xffffffff, k[3] & 0xffffffff);
13529 if (TARGET_MINIMAL_TOC)
13530 fputs ("\t.long ", file);
13532 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13533 k[0] & 0xffffffff, k[1] & 0xffffffff,
13534 k[2] & 0xffffffff, k[3] & 0xffffffff);
13535 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13536 k[0] & 0xffffffff, k[1] & 0xffffffff,
13537 k[2] & 0xffffffff, k[3] & 0xffffffff);
13541 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13543 REAL_VALUE_TYPE rv;
13546 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13547 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13551 if (TARGET_MINIMAL_TOC)
13552 fputs (DOUBLE_INT_ASM_OP, file);
13554 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13555 k[0] & 0xffffffff, k[1] & 0xffffffff);
13556 fprintf (file, "0x%lx%08lx\n",
13557 k[0] & 0xffffffff, k[1] & 0xffffffff);
13562 if (TARGET_MINIMAL_TOC)
13563 fputs ("\t.long ", file);
13565 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13566 k[0] & 0xffffffff, k[1] & 0xffffffff);
13567 fprintf (file, "0x%lx,0x%lx\n",
13568 k[0] & 0xffffffff, k[1] & 0xffffffff);
13572 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13574 REAL_VALUE_TYPE rv;
13577 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13578 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13582 if (TARGET_MINIMAL_TOC)
13583 fputs (DOUBLE_INT_ASM_OP, file);
13585 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13586 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13591 if (TARGET_MINIMAL_TOC)
13592 fputs ("\t.long ", file);
13594 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13595 fprintf (file, "0x%lx\n", l & 0xffffffff);
13599 else if (GET_MODE (x) == VOIDmode
13600 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13602 unsigned HOST_WIDE_INT low;
13603 HOST_WIDE_INT high;
13605 if (GET_CODE (x) == CONST_DOUBLE)
13607 low = CONST_DOUBLE_LOW (x);
13608 high = CONST_DOUBLE_HIGH (x);
13611 #if HOST_BITS_PER_WIDE_INT == 32
13614 high = (low & 0x80000000) ? ~0 : 0;
13618 low = INTVAL (x) & 0xffffffff;
13619 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13623 /* TOC entries are always Pmode-sized, but since this
13624 is a bigendian machine then if we're putting smaller
13625 integer constants in the TOC we have to pad them.
13626 (This is still a win over putting the constants in
13627 a separate constant pool, because then we'd have
13628 to have both a TOC entry _and_ the actual constant.)
13630 For a 32-bit target, CONST_INT values are loaded and shifted
13631 entirely within `low' and can be stored in one TOC entry. */
13633 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13634 abort ();/* It would be easy to make this work, but it doesn't now. */
13636 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13638 #if HOST_BITS_PER_WIDE_INT == 32
13639 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13640 POINTER_SIZE, &low, &high, 0);
13643 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13644 high = (HOST_WIDE_INT) low >> 32;
13651 if (TARGET_MINIMAL_TOC)
13652 fputs (DOUBLE_INT_ASM_OP, file);
13654 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13655 (long) high & 0xffffffff, (long) low & 0xffffffff);
13656 fprintf (file, "0x%lx%08lx\n",
13657 (long) high & 0xffffffff, (long) low & 0xffffffff);
13662 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13664 if (TARGET_MINIMAL_TOC)
13665 fputs ("\t.long ", file);
13667 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13668 (long) high & 0xffffffff, (long) low & 0xffffffff);
13669 fprintf (file, "0x%lx,0x%lx\n",
13670 (long) high & 0xffffffff, (long) low & 0xffffffff);
13674 if (TARGET_MINIMAL_TOC)
13675 fputs ("\t.long ", file);
13677 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13678 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13684 if (GET_CODE (x) == CONST)
13686 if (GET_CODE (XEXP (x, 0)) != PLUS)
13689 base = XEXP (XEXP (x, 0), 0);
13690 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13693 if (GET_CODE (base) == SYMBOL_REF)
13694 name = XSTR (base, 0);
13695 else if (GET_CODE (base) == LABEL_REF)
13696 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13697 else if (GET_CODE (base) == CODE_LABEL)
13698 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13702 real_name = (*targetm.strip_name_encoding) (name);
13703 if (TARGET_MINIMAL_TOC)
13704 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13707 fprintf (file, "\t.tc %s", real_name);
13710 fprintf (file, ".N%d", - offset);
13712 fprintf (file, ".P%d", offset);
13714 fputs ("[TC],", file);
13717 /* Currently C++ toc references to vtables can be emitted before it
13718 is decided whether the vtable is public or private. If this is
13719 the case, then the linker will eventually complain that there is
13720 a TOC reference to an unknown section. Thus, for vtables only,
13721 we emit the TOC reference to reference the symbol and not the
13723 if (VTABLE_NAME_P (name))
13725 RS6000_OUTPUT_BASENAME (file, name);
13727 fprintf (file, "%d", offset);
13728 else if (offset > 0)
13729 fprintf (file, "+%d", offset);
13732 output_addr_const (file, x);
13736 /* Output an assembler pseudo-op to write an ASCII string of N characters
13737 starting at P to FILE.
13739 On the RS/6000, we have to do this using the .byte operation and
13740 write out special characters outside the quoted string.
13741 Also, the assembler is broken; very long strings are truncated,
13742 so we must artificially break them up early. */
13745 output_ascii (FILE *file, const char *p, int n)
13748 int i, count_string;
13749 const char *for_string = "\t.byte \"";
13750 const char *for_decimal = "\t.byte ";
13751 const char *to_close = NULL;
13754 for (i = 0; i < n; i++)
13757 if (c >= ' ' && c < 0177)
13760 fputs (for_string, file);
13763 /* Write two quotes to get one. */
13771 for_decimal = "\"\n\t.byte ";
13775 if (count_string >= 512)
13777 fputs (to_close, file);
13779 for_string = "\t.byte \"";
13780 for_decimal = "\t.byte ";
13788 fputs (for_decimal, file);
13789 fprintf (file, "%d", c);
13791 for_string = "\n\t.byte \"";
13792 for_decimal = ", ";
13798 /* Now close the string if we have written one. Then end the line. */
13800 fputs (to_close, file);
13803 /* Generate a unique section name for FILENAME for a section type
13804 represented by SECTION_DESC. Output goes into BUF.
13806 SECTION_DESC can be any string, as long as it is different for each
13807 possible section type.
13809 We name the section in the same manner as xlc. The name begins with an
13810 underscore followed by the filename (after stripping any leading directory
13811 names) with the last period replaced by the string SECTION_DESC. If
13812 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13816 rs6000_gen_section_name (char **buf, const char *filename,
13817 const char *section_desc)
13819 const char *q, *after_last_slash, *last_period = 0;
13823 after_last_slash = filename;
13824 for (q = filename; *q; q++)
13827 after_last_slash = q + 1;
13828 else if (*q == '.')
13832 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13833 *buf = (char *) xmalloc (len);
13838 for (q = after_last_slash; *q; q++)
13840 if (q == last_period)
13842 strcpy (p, section_desc);
13843 p += strlen (section_desc);
13847 else if (ISALNUM (*q))
13851 if (last_period == 0)
13852 strcpy (p, section_desc);
13857 /* Emit profile function. */
13860 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13862 if (TARGET_PROFILE_KERNEL)
13865 if (DEFAULT_ABI == ABI_AIX)
13867 #ifndef NO_PROFILE_COUNTERS
13868 # define NO_PROFILE_COUNTERS 0
13870 if (NO_PROFILE_COUNTERS)
13871 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13875 const char *label_name;
13878 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13879 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13880 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13882 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13886 else if (DEFAULT_ABI == ABI_DARWIN)
13888 const char *mcount_name = RS6000_MCOUNT;
13889 int caller_addr_regno = LINK_REGISTER_REGNUM;
13891 /* Be conservative and always set this, at least for now. */
13892 current_function_uses_pic_offset_table = 1;
13895 /* For PIC code, set up a stub and collect the caller's address
13896 from r0, which is where the prologue puts it. */
13897 if (MACHOPIC_INDIRECT)
13899 mcount_name = machopic_stub_name (mcount_name);
13900 if (current_function_uses_pic_offset_table)
13901 caller_addr_regno = 0;
13904 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13906 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13910 /* Write function profiler code. */
13913 output_function_profiler (FILE *file, int labelno)
13918 switch (DEFAULT_ABI)
13927 warning ("no profiling of 64-bit code for this ABI");
13930 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13931 fprintf (file, "\tmflr %s\n", reg_names[0]);
13934 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13935 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13936 reg_names[0], save_lr, reg_names[1]);
13937 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13938 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13939 assemble_name (file, buf);
13940 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13942 else if (flag_pic > 1)
13944 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13945 reg_names[0], save_lr, reg_names[1]);
13946 /* Now, we need to get the address of the label. */
13947 fputs ("\tbl 1f\n\t.long ", file);
13948 assemble_name (file, buf);
13949 fputs ("-.\n1:", file);
13950 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13951 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13952 reg_names[0], reg_names[11]);
13953 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13954 reg_names[0], reg_names[0], reg_names[11]);
13958 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13959 assemble_name (file, buf);
13960 fputs ("@ha\n", file);
13961 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13962 reg_names[0], save_lr, reg_names[1]);
13963 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13964 assemble_name (file, buf);
13965 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13968 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13969 fprintf (file, "\tbl %s%s\n",
13970 RS6000_MCOUNT, flag_pic ? "@plt" : "");
13976 if (!TARGET_PROFILE_KERNEL)
13978 /* Don't do anything, done in output_profile_hook (). */
13985 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13986 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13988 if (current_function_needs_context)
13990 asm_fprintf (file, "\tstd %s,24(%s)\n",
13991 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13992 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13993 asm_fprintf (file, "\tld %s,24(%s)\n",
13994 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13997 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
14005 rs6000_use_dfa_pipeline_interface (void)
14010 /* Power4 load update and store update instructions are cracked into a
14011 load or store and an integer insn which are executed in the same cycle.
14012 Branches have their own dispatch slot which does not count against the
14013 GCC issue rate, but it changes the program flow so there are no other
14014 instructions to issue in this cycle. */
14017 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
14018 int verbose ATTRIBUTE_UNUSED,
14019 rtx insn, int more)
14021 if (GET_CODE (PATTERN (insn)) == USE
14022 || GET_CODE (PATTERN (insn)) == CLOBBER)
14025 if (rs6000_sched_groups)
14027 if (is_microcoded_insn (insn))
14029 else if (is_cracked_insn (insn))
14030 return more > 2 ? more - 2 : 0;
14036 /* Adjust the cost of a scheduling dependency. Return the new cost of
14037 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
14040 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
14043 if (! recog_memoized (insn))
14046 if (REG_NOTE_KIND (link) != 0)
14049 if (REG_NOTE_KIND (link) == 0)
14051 /* Data dependency; DEP_INSN writes a register that INSN reads
14052 some cycles later. */
14053 switch (get_attr_type (insn))
14056 /* Tell the first scheduling pass about the latency between
14057 a mtctr and bctr (and mtlr and br/blr). The first
14058 scheduling pass will not know about this latency since
14059 the mtctr instruction, which has the latency associated
14060 to it, will be generated by reload. */
14061 return TARGET_POWER ? 5 : 4;
14063 /* Leave some extra cycles between a compare and its
14064 dependent branch, to inhibit expensive mispredicts. */
14065 if ((rs6000_cpu_attr == CPU_PPC603
14066 || rs6000_cpu_attr == CPU_PPC604
14067 || rs6000_cpu_attr == CPU_PPC604E
14068 || rs6000_cpu_attr == CPU_PPC620
14069 || rs6000_cpu_attr == CPU_PPC630
14070 || rs6000_cpu_attr == CPU_PPC750
14071 || rs6000_cpu_attr == CPU_PPC7400
14072 || rs6000_cpu_attr == CPU_PPC7450
14073 || rs6000_cpu_attr == CPU_POWER4
14074 || rs6000_cpu_attr == CPU_POWER5)
14075 && recog_memoized (dep_insn)
14076 && (INSN_CODE (dep_insn) >= 0)
14077 && (get_attr_type (dep_insn) == TYPE_CMP
14078 || get_attr_type (dep_insn) == TYPE_COMPARE
14079 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
14080 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
14081 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
14082 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
14083 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
14084 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
14089 /* Fall out to return default cost. */
14095 /* The function returns a true if INSN is microcoded.
14096 Return false otherwise. */
14099 is_microcoded_insn (rtx insn)
14101 if (!insn || !INSN_P (insn)
14102 || GET_CODE (PATTERN (insn)) == USE
14103 || GET_CODE (PATTERN (insn)) == CLOBBER)
14106 if (rs6000_sched_groups)
14108 enum attr_type type = get_attr_type (insn);
14109 if (type == TYPE_LOAD_EXT_U
14110 || type == TYPE_LOAD_EXT_UX
14111 || type == TYPE_LOAD_UX
14112 || type == TYPE_STORE_UX
14113 || type == TYPE_MFCR)
14120 /* The function returns a nonzero value if INSN can be scheduled only
14121 as the first insn in a dispatch group ("dispatch-slot restricted").
14122 In this case, the returned value indicates how many dispatch slots
14123 the insn occupies (at the beginning of the group).
14124 Return 0 otherwise. */
14127 is_dispatch_slot_restricted (rtx insn)
14129 enum attr_type type;
14131 if (!rs6000_sched_groups)
14135 || insn == NULL_RTX
14136 || GET_CODE (insn) == NOTE
14137 || GET_CODE (PATTERN (insn)) == USE
14138 || GET_CODE (PATTERN (insn)) == CLOBBER)
14141 type = get_attr_type (insn);
14148 case TYPE_DELAYED_CR:
14149 case TYPE_CR_LOGICAL:
14157 if (rs6000_cpu == PROCESSOR_POWER5
14158 && is_cracked_insn (insn))
14164 /* The function returns true if INSN is cracked into 2 instructions
14165 by the processor (and therefore occupies 2 issue slots). */
14168 is_cracked_insn (rtx insn)
14170 if (!insn || !INSN_P (insn)
14171 || GET_CODE (PATTERN (insn)) == USE
14172 || GET_CODE (PATTERN (insn)) == CLOBBER)
14175 if (rs6000_sched_groups)
14177 enum attr_type type = get_attr_type (insn);
14178 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14179 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14180 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14181 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14182 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14183 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14184 || type == TYPE_IDIV || type == TYPE_LDIV
14185 || type == TYPE_INSERT_WORD)
14192 /* The function returns true if INSN can be issued only from
14193 the branch slot. */
14196 is_branch_slot_insn (rtx insn)
14198 if (!insn || !INSN_P (insn)
14199 || GET_CODE (PATTERN (insn)) == USE
14200 || GET_CODE (PATTERN (insn)) == CLOBBER)
14203 if (rs6000_sched_groups)
14205 enum attr_type type = get_attr_type (insn);
14206 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14214 /* A C statement (sans semicolon) to update the integer scheduling
14215 priority INSN_PRIORITY (INSN). Increase the priority to execute the
14216 INSN earlier, reduce the priority to execute INSN later. Do not
14217 define this macro if you do not need to adjust the scheduling
14218 priorities of insns. */
14221 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14223 /* On machines (like the 750) which have asymmetric integer units,
14224 where one integer unit can do multiply and divides and the other
14225 can't, reduce the priority of multiply/divide so it is scheduled
14226 before other integer operations. */
14229 if (! INSN_P (insn))
14232 if (GET_CODE (PATTERN (insn)) == USE)
14235 switch (rs6000_cpu_attr) {
14237 switch (get_attr_type (insn))
14244 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14245 priority, priority);
14246 if (priority >= 0 && priority < 0x01000000)
14253 if (is_dispatch_slot_restricted (insn)
14254 && reload_completed
14255 && current_sched_info->sched_max_insns_priority
14256 && rs6000_sched_restricted_insns_priority)
14259 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
14260 if (rs6000_sched_restricted_insns_priority == 1)
14261 /* Attach highest priority to insn. This means that in
14262 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14263 precede 'priority' (critical path) considerations. */
14264 return current_sched_info->sched_max_insns_priority;
14265 else if (rs6000_sched_restricted_insns_priority == 2)
14266 /* Increase priority of insn by a minimal amount. This means that in
14267 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14268 precede dispatch-slot restriction considerations. */
14269 return (priority + 1);
14275 /* Return how many instructions the machine can issue per cycle. */
14278 rs6000_issue_rate (void)
14280 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
14281 if (!reload_completed)
14284 switch (rs6000_cpu_attr) {
14285 case CPU_RIOS1: /* ? */
14287 case CPU_PPC601: /* ? */
14310 /* Return how many instructions to look ahead for better insn
14314 rs6000_use_sched_lookahead (void)
14316 if (rs6000_cpu_attr == CPU_PPC8540)
14321 /* Determine is PAT refers to memory. */
14324 is_mem_ref (rtx pat)
14330 if (GET_CODE (pat) == MEM)
14333 /* Recursively process the pattern. */
14334 fmt = GET_RTX_FORMAT (GET_CODE (pat));
14336 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14339 ret |= is_mem_ref (XEXP (pat, i));
14340 else if (fmt[i] == 'E')
14341 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14342 ret |= is_mem_ref (XVECEXP (pat, i, j));
14348 /* Determine if PAT is a PATTERN of a load insn. */
14351 is_load_insn1 (rtx pat)
14353 if (!pat || pat == NULL_RTX)
14356 if (GET_CODE (pat) == SET)
14357 return is_mem_ref (SET_SRC (pat));
14359 if (GET_CODE (pat) == PARALLEL)
14363 for (i = 0; i < XVECLEN (pat, 0); i++)
14364 if (is_load_insn1 (XVECEXP (pat, 0, i)))
14371 /* Determine if INSN loads from memory. */
14374 is_load_insn (rtx insn)
14376 if (!insn || !INSN_P (insn))
14379 if (GET_CODE (insn) == CALL_INSN)
14382 return is_load_insn1 (PATTERN (insn));
14385 /* Determine if PAT is a PATTERN of a store insn. */
14388 is_store_insn1 (rtx pat)
14390 if (!pat || pat == NULL_RTX)
14393 if (GET_CODE (pat) == SET)
14394 return is_mem_ref (SET_DEST (pat));
14396 if (GET_CODE (pat) == PARALLEL)
14400 for (i = 0; i < XVECLEN (pat, 0); i++)
14401 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14408 /* Determine if INSN stores to memory. */
14411 is_store_insn (rtx insn)
14413 if (!insn || !INSN_P (insn))
14416 return is_store_insn1 (PATTERN (insn));
14419 /* Returns whether the dependence between INSN and NEXT is considered
14420 costly by the given target. */
14423 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14425 /* If the flag is not enbled - no dependence is considered costly;
14426 allow all dependent insns in the same group.
14427 This is the most aggressive option. */
14428 if (rs6000_sched_costly_dep == no_dep_costly)
14431 /* If the flag is set to 1 - a dependence is always considered costly;
14432 do not allow dependent instructions in the same group.
14433 This is the most conservative option. */
14434 if (rs6000_sched_costly_dep == all_deps_costly)
14437 if (rs6000_sched_costly_dep == store_to_load_dep_costly
14438 && is_load_insn (next)
14439 && is_store_insn (insn))
14440 /* Prevent load after store in the same group. */
14443 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
14444 && is_load_insn (next)
14445 && is_store_insn (insn)
14446 && (!link || (int) REG_NOTE_KIND (link) == 0))
14447 /* Prevent load after store in the same group if it is a true dependence. */
14450 /* The flag is set to X; dependences with latency >= X are considered costly,
14451 and will not be scheduled in the same group. */
14452 if (rs6000_sched_costly_dep <= max_dep_latency
14453 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
14459 /* Return the next insn after INSN that is found before TAIL is reached,
14460 skipping any "non-active" insns - insns that will not actually occupy
14461 an issue slot. Return NULL_RTX if such an insn is not found. */
14464 get_next_active_insn (rtx insn, rtx tail)
14468 if (!insn || insn == tail)
14471 next_insn = NEXT_INSN (insn);
14474 && next_insn != tail
14475 && (GET_CODE(next_insn) == NOTE
14476 || GET_CODE (PATTERN (next_insn)) == USE
14477 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14479 next_insn = NEXT_INSN (next_insn);
14482 if (!next_insn || next_insn == tail)
14488 /* Return whether the presence of INSN causes a dispatch group termination
14489 of group WHICH_GROUP.
14491 If WHICH_GROUP == current_group, this function will return true if INSN
14492 causes the termination of the current group (i.e, the dispatch group to
14493 which INSN belongs). This means that INSN will be the last insn in the
14494 group it belongs to.
14496 If WHICH_GROUP == previous_group, this function will return true if INSN
14497 causes the termination of the previous group (i.e, the dispatch group that
14498 precedes the group to which INSN belongs). This means that INSN will be
14499 the first insn in the group it belongs to). */
14502 insn_terminates_group_p (rtx insn, enum group_termination which_group)
14504 enum attr_type type;
14509 type = get_attr_type (insn);
14511 if (is_microcoded_insn (insn))
14514 if (which_group == current_group)
14516 if (is_branch_slot_insn (insn))
14520 else if (which_group == previous_group)
14522 if (is_dispatch_slot_restricted (insn))
14530 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14531 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14534 is_costly_group (rtx *group_insns, rtx next_insn)
14539 int issue_rate = rs6000_issue_rate ();
14541 for (i = 0; i < issue_rate; i++)
14543 rtx insn = group_insns[i];
14546 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14548 rtx next = XEXP (link, 0);
14549 if (next == next_insn)
14551 cost = insn_cost (insn, link, next_insn);
14552 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14561 /* Utility of the function redefine_groups.
14562 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14563 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14564 to keep it "far" (in a separate group) from GROUP_INSNS, following
14565 one of the following schemes, depending on the value of the flag
14566 -minsert_sched_nops = X:
14567 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14568 in order to force NEXT_INSN into a separate group.
14569 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14570 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14571 insertion (has a group just ended, how many vacant issue slots remain in the
14572 last group, and how many dispatch groups were encountered so far). */
14575 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
14576 bool *group_end, int can_issue_more, int *group_count)
14580 int issue_rate = rs6000_issue_rate ();
14581 bool end = *group_end;
14584 if (next_insn == NULL_RTX)
14585 return can_issue_more;
14587 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
14588 return can_issue_more;
14590 force = is_costly_group (group_insns, next_insn);
14592 return can_issue_more;
14594 if (sched_verbose > 6)
14595 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
14596 *group_count ,can_issue_more);
14598 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14601 can_issue_more = 0;
14603 /* Since only a branch can be issued in the last issue_slot, it is
14604 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14605 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14606 in this case the last nop will start a new group and the branch will be
14607 forced to the new group. */
14608 if (can_issue_more && !is_branch_slot_insn (next_insn))
14611 while (can_issue_more > 0)
14614 emit_insn_before (nop, next_insn);
14622 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14624 int n_nops = rs6000_sched_insert_nops;
14626 /* Nops can't be issued from the branch slot, so the effective
14627 issue_rate for nops is 'issue_rate - 1'. */
14628 if (can_issue_more == 0)
14629 can_issue_more = issue_rate;
14631 if (can_issue_more == 0)
14633 can_issue_more = issue_rate - 1;
14636 for (i = 0; i < issue_rate; i++)
14638 group_insns[i] = 0;
14645 emit_insn_before (nop, next_insn);
14646 if (can_issue_more == issue_rate - 1) /* new group begins */
14649 if (can_issue_more == 0)
14651 can_issue_more = issue_rate - 1;
14654 for (i = 0; i < issue_rate; i++)
14656 group_insns[i] = 0;
14662 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14665 *group_end = /* Is next_insn going to start a new group? */
14667 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14668 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14669 || (can_issue_more < issue_rate &&
14670 insn_terminates_group_p (next_insn, previous_group)));
14671 if (*group_end && end)
14674 if (sched_verbose > 6)
14675 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14676 *group_count, can_issue_more);
14677 return can_issue_more;
14680 return can_issue_more;
14683 /* This function tries to synch the dispatch groups that the compiler "sees"
14684 with the dispatch groups that the processor dispatcher is expected to
14685 form in practice. It tries to achieve this synchronization by forcing the
14686 estimated processor grouping on the compiler (as opposed to the function
14687 'pad_goups' which tries to force the scheduler's grouping on the processor).
14689 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14690 examines the (estimated) dispatch groups that will be formed by the processor
14691 dispatcher. It marks these group boundaries to reflect the estimated
14692 processor grouping, overriding the grouping that the scheduler had marked.
14693 Depending on the value of the flag '-minsert-sched-nops' this function can
14694 force certain insns into separate groups or force a certain distance between
14695 them by inserting nops, for example, if there exists a "costly dependence"
14698 The function estimates the group boundaries that the processor will form as
14699 folllows: It keeps track of how many vacant issue slots are available after
14700 each insn. A subsequent insn will start a new group if one of the following
14702 - no more vacant issue slots remain in the current dispatch group.
14703 - only the last issue slot, which is the branch slot, is vacant, but the next
14704 insn is not a branch.
14705 - only the last 2 or less issue slots, including the branch slot, are vacant,
14706 which means that a cracked insn (which occupies two issue slots) can't be
14707 issued in this group.
14708 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14709 start a new group. */
14712 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14714 rtx insn, next_insn;
14716 int can_issue_more;
14719 int group_count = 0;
14723 issue_rate = rs6000_issue_rate ();
14724 group_insns = alloca (issue_rate * sizeof (rtx));
14725 for (i = 0; i < issue_rate; i++)
14727 group_insns[i] = 0;
14729 can_issue_more = issue_rate;
14731 insn = get_next_active_insn (prev_head_insn, tail);
14734 while (insn != NULL_RTX)
14736 slot = (issue_rate - can_issue_more);
14737 group_insns[slot] = insn;
14739 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14740 if (insn_terminates_group_p (insn, current_group))
14741 can_issue_more = 0;
14743 next_insn = get_next_active_insn (insn, tail);
14744 if (next_insn == NULL_RTX)
14745 return group_count + 1;
14747 group_end = /* Is next_insn going to start a new group? */
14748 (can_issue_more == 0
14749 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14750 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14751 || (can_issue_more < issue_rate &&
14752 insn_terminates_group_p (next_insn, previous_group)));
14754 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14755 next_insn, &group_end, can_issue_more, &group_count);
14760 can_issue_more = 0;
14761 for (i = 0; i < issue_rate; i++)
14763 group_insns[i] = 0;
14767 if (GET_MODE (next_insn) == TImode && can_issue_more)
14768 PUT_MODE(next_insn, VOIDmode);
14769 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14770 PUT_MODE (next_insn, TImode);
14773 if (can_issue_more == 0)
14774 can_issue_more = issue_rate;
14777 return group_count;
14780 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14781 dispatch group boundaries that the scheduler had marked. Pad with nops
14782 any dispatch groups which have vacant issue slots, in order to force the
14783 scheduler's grouping on the processor dispatcher. The function
14784 returns the number of dispatch groups found. */
14787 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14789 rtx insn, next_insn;
14792 int can_issue_more;
14794 int group_count = 0;
14796 /* Initialize issue_rate. */
14797 issue_rate = rs6000_issue_rate ();
14798 can_issue_more = issue_rate;
14800 insn = get_next_active_insn (prev_head_insn, tail);
14801 next_insn = get_next_active_insn (insn, tail);
14803 while (insn != NULL_RTX)
14806 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14808 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14810 if (next_insn == NULL_RTX)
14815 /* If the scheduler had marked group termination at this location
14816 (between insn and next_indn), and neither insn nor next_insn will
14817 force group termination, pad the group with nops to force group
14820 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14821 && !insn_terminates_group_p (insn, current_group)
14822 && !insn_terminates_group_p (next_insn, previous_group))
14824 if (!is_branch_slot_insn(next_insn))
14827 while (can_issue_more)
14830 emit_insn_before (nop, next_insn);
14835 can_issue_more = issue_rate;
14840 next_insn = get_next_active_insn (insn, tail);
14843 return group_count;
14846 /* The following function is called at the end of scheduling BB.
14847 After reload, it inserts nops at insn group bundling. */
14850 rs6000_sched_finish (FILE *dump, int sched_verbose)
14855 fprintf (dump, "=== Finishing schedule.\n");
14857 if (reload_completed && rs6000_sched_groups)
14859 if (rs6000_sched_insert_nops == sched_finish_none)
14862 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14863 n_groups = pad_groups (dump, sched_verbose,
14864 current_sched_info->prev_head,
14865 current_sched_info->next_tail);
14867 n_groups = redefine_groups (dump, sched_verbose,
14868 current_sched_info->prev_head,
14869 current_sched_info->next_tail);
14871 if (sched_verbose >= 6)
14873 fprintf (dump, "ngroups = %d\n", n_groups);
14874 print_rtl (dump, current_sched_info->prev_head);
14875 fprintf (dump, "Done finish_sched\n");
14880 /* Length in units of the trampoline for entering a nested function. */
14883 rs6000_trampoline_size (void)
14887 switch (DEFAULT_ABI)
14893 ret = (TARGET_32BIT) ? 12 : 24;
14898 ret = (TARGET_32BIT) ? 40 : 48;
14905 /* Emit RTL insns to initialize the variable parts of a trampoline.
14906 FNADDR is an RTX for the address of the function's pure code.
14907 CXT is an RTX for the static chain value for the function. */
14910 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
14912 enum machine_mode pmode = Pmode;
14913 int regsize = (TARGET_32BIT) ? 4 : 8;
14914 rtx ctx_reg = force_reg (pmode, cxt);
14916 switch (DEFAULT_ABI)
14921 /* Macros to shorten the code expansions below. */
14922 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
14923 #define MEM_PLUS(addr,offset) \
14924 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
14926 /* Under AIX, just build the 3 word function descriptor */
14929 rtx fn_reg = gen_reg_rtx (pmode);
14930 rtx toc_reg = gen_reg_rtx (pmode);
14931 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
14932 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
14933 emit_move_insn (MEM_DEREF (addr), fn_reg);
14934 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
14935 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
14939 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
14942 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
14943 FALSE, VOIDmode, 4,
14945 GEN_INT (rs6000_trampoline_size ()), SImode,
14955 /* Table of valid machine attributes. */
14957 const struct attribute_spec rs6000_attribute_table[] =
14959 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
14960 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
14961 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14962 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14963 { NULL, 0, 0, false, false, false, NULL }
14966 /* Handle the "altivec" attribute. The attribute may have
14967 arguments as follows:
14969 __attribute__((altivec(vector__)))
14970 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
14971 __attribute__((altivec(bool__))) (always followed by 'unsigned')
14973 and may appear more than once (e.g., 'vector bool char') in a
14974 given declaration. */
14977 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
14978 int flags ATTRIBUTE_UNUSED,
14979 bool *no_add_attrs)
14981 tree type = *node, result = NULL_TREE;
14982 enum machine_mode mode;
14985 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
14986 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
14987 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
14990 while (POINTER_TYPE_P (type)
14991 || TREE_CODE (type) == FUNCTION_TYPE
14992 || TREE_CODE (type) == METHOD_TYPE
14993 || TREE_CODE (type) == ARRAY_TYPE)
14994 type = TREE_TYPE (type);
14996 mode = TYPE_MODE (type);
14998 if (rs6000_warn_altivec_long
14999 && (type == long_unsigned_type_node || type == long_integer_type_node))
15000 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
15002 switch (altivec_type)
15005 unsigned_p = TREE_UNSIGNED (type);
15009 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
15012 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
15015 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
15017 case SFmode: result = V4SF_type_node; break;
15018 /* If the user says 'vector int bool', we may be handed the 'bool'
15019 attribute _before_ the 'vector' attribute, and so select the proper
15020 type in the 'b' case below. */
15021 case V4SImode: case V8HImode: case V16QImode: result = type;
15028 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
15029 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
15030 case QImode: case V16QImode: result = bool_V16QI_type_node;
15037 case V8HImode: result = pixel_V8HI_type_node;
15043 if (result && result != type && TYPE_READONLY (type))
15044 result = build_qualified_type (result, TYPE_QUAL_CONST);
15046 *no_add_attrs = true; /* No need to hang on to the attribute. */
15049 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
15051 *node = reconstruct_complex_type (*node, result);
15056 /* AltiVec defines four built-in scalar types that serve as vector
15057 elements; we must teach the compiler how to mangle them. */
15059 static const char *
15060 rs6000_mangle_fundamental_type (tree type)
15062 if (type == bool_char_type_node) return "U6__boolc";
15063 if (type == bool_short_type_node) return "U6__bools";
15064 if (type == pixel_type_node) return "u7__pixel";
15065 if (type == bool_int_type_node) return "U6__booli";
15067 /* For all other types, use normal C++ mangling. */
15071 /* Handle a "longcall" or "shortcall" attribute; arguments as in
15072 struct attribute_spec.handler. */
15075 rs6000_handle_longcall_attribute (tree *node, tree name,
15076 tree args ATTRIBUTE_UNUSED,
15077 int flags ATTRIBUTE_UNUSED,
15078 bool *no_add_attrs)
15080 if (TREE_CODE (*node) != FUNCTION_TYPE
15081 && TREE_CODE (*node) != FIELD_DECL
15082 && TREE_CODE (*node) != TYPE_DECL)
15084 warning ("`%s' attribute only applies to functions",
15085 IDENTIFIER_POINTER (name));
15086 *no_add_attrs = true;
15092 /* Set longcall attributes on all functions declared when
15093 rs6000_default_long_calls is true. */
15095 rs6000_set_default_type_attributes (tree type)
15097 if (rs6000_default_long_calls
15098 && (TREE_CODE (type) == FUNCTION_TYPE
15099 || TREE_CODE (type) == METHOD_TYPE))
15100 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
15102 TYPE_ATTRIBUTES (type));
15105 /* Return a reference suitable for calling a function with the
15106 longcall attribute. */
15109 rs6000_longcall_ref (rtx call_ref)
15111 const char *call_name;
15114 if (GET_CODE (call_ref) != SYMBOL_REF)
15117 /* System V adds '.' to the internal name, so skip them. */
15118 call_name = XSTR (call_ref, 0);
15119 if (*call_name == '.')
15121 while (*call_name == '.')
15124 node = get_identifier (call_name);
15125 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
15128 return force_reg (Pmode, call_ref);
15131 #ifdef USING_ELFOS_H
15133 /* A C statement or statements to switch to the appropriate section
15134 for output of RTX in mode MODE. You can assume that RTX is some
15135 kind of constant in RTL. The argument MODE is redundant except in
15136 the case of a `const_int' rtx. Select the section by calling
15137 `text_section' or one of the alternatives for other sections.
15139 Do not define this macro if you put all constants in the read-only
15143 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
15144 unsigned HOST_WIDE_INT align)
15146 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15149 default_elf_select_rtx_section (mode, x, align);
15152 /* A C statement or statements to switch to the appropriate
15153 section for output of DECL. DECL is either a `VAR_DECL' node
15154 or a constant of some sort. RELOC indicates whether forming
15155 the initial value of DECL requires link-time relocations. */
15158 rs6000_elf_select_section (tree decl, int reloc,
15159 unsigned HOST_WIDE_INT align)
15161 /* Pretend that we're always building for a shared library when
15162 ABI_AIX, because otherwise we end up with dynamic relocations
15163 in read-only sections. This happens for function pointers,
15164 references to vtables in typeinfo, and probably other cases. */
15165 default_elf_select_section_1 (decl, reloc, align,
15166 flag_pic || DEFAULT_ABI == ABI_AIX);
15169 /* A C statement to build up a unique section name, expressed as a
15170 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
15171 RELOC indicates whether the initial value of EXP requires
15172 link-time relocations. If you do not define this macro, GCC will use
15173 the symbol name prefixed by `.' as the section name. Note - this
15174 macro can now be called for uninitialized data items as well as
15175 initialized data and functions. */
15178 rs6000_elf_unique_section (tree decl, int reloc)
15180 /* As above, pretend that we're always building for a shared library
15181 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
15182 default_unique_section_1 (decl, reloc,
15183 flag_pic || DEFAULT_ABI == ABI_AIX);
15186 /* For a SYMBOL_REF, set generic flags and then perform some
15187 target-specific processing.
15189 When the AIX ABI is requested on a non-AIX system, replace the
15190 function name with the real name (with a leading .) rather than the
15191 function descriptor name. This saves a lot of overriding code to
15192 read the prefixes. */
15195 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
15197 default_encode_section_info (decl, rtl, first);
15200 && TREE_CODE (decl) == FUNCTION_DECL
15202 && DEFAULT_ABI == ABI_AIX)
15204 rtx sym_ref = XEXP (rtl, 0);
15205 size_t len = strlen (XSTR (sym_ref, 0));
15206 char *str = alloca (len + 2);
15208 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
15209 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
15214 rs6000_elf_in_small_data_p (tree decl)
15216 if (rs6000_sdata == SDATA_NONE)
15219 /* We want to merge strings, so we never consider them small data. */
15220 if (TREE_CODE (decl) == STRING_CST)
15223 /* Functions are never in the small data area. */
15224 if (TREE_CODE (decl) == FUNCTION_DECL)
15227 /* Thread-local vars can't go in the small data area. */
15228 if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL (decl))
15231 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15233 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15234 if (strcmp (section, ".sdata") == 0
15235 || strcmp (section, ".sdata2") == 0
15236 || strcmp (section, ".sbss") == 0
15237 || strcmp (section, ".sbss2") == 0
15238 || strcmp (section, ".PPC.EMB.sdata0") == 0
15239 || strcmp (section, ".PPC.EMB.sbss0") == 0)
15244 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15247 && (unsigned HOST_WIDE_INT) size <= g_switch_value
15248 /* If it's not public, and we're not going to reference it there,
15249 there's no need to put it in the small data section. */
15250 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15257 #endif /* USING_ELFOS_H */
15260 /* Return a REG that occurs in ADDR with coefficient 1.
15261 ADDR can be effectively incremented by incrementing REG.
15263 r0 is special and we must not select it as an address
15264 register by this routine since our caller will try to
15265 increment the returned register via an "la" instruction. */
15268 find_addr_reg (rtx addr)
15270 while (GET_CODE (addr) == PLUS)
15272 if (GET_CODE (XEXP (addr, 0)) == REG
15273 && REGNO (XEXP (addr, 0)) != 0)
15274 addr = XEXP (addr, 0);
15275 else if (GET_CODE (XEXP (addr, 1)) == REG
15276 && REGNO (XEXP (addr, 1)) != 0)
15277 addr = XEXP (addr, 1);
15278 else if (CONSTANT_P (XEXP (addr, 0)))
15279 addr = XEXP (addr, 1);
15280 else if (CONSTANT_P (XEXP (addr, 1)))
15281 addr = XEXP (addr, 0);
15285 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15291 rs6000_fatal_bad_address (rtx op)
15293 fatal_insn ("bad address", op);
15299 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
15300 reference and a constant. */
15303 symbolic_operand (rtx op)
15305 switch (GET_CODE (op))
15312 return (GET_CODE (op) == SYMBOL_REF ||
15313 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
15314 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
15315 && GET_CODE (XEXP (op, 1)) == CONST_INT);
15324 static tree branch_island_list = 0;
15326 /* Remember to generate a branch island for far calls to the given
15330 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
15332 tree branch_island = build_tree_list (function_name, label_name);
15333 TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
15334 TREE_CHAIN (branch_island) = branch_island_list;
15335 branch_island_list = branch_island;
15338 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
15339 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
15340 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
15341 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15343 /* Generate far-jump branch islands for everything on the
15344 branch_island_list. Invoked immediately after the last instruction
15345 of the epilogue has been emitted; the branch-islands must be
15346 appended to, and contiguous with, the function body. Mach-O stubs
15347 are generated in machopic_output_stub(). */
15350 macho_branch_islands (void)
15353 tree branch_island;
15355 for (branch_island = branch_island_list;
15357 branch_island = TREE_CHAIN (branch_island))
15359 const char *label =
15360 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
15362 darwin_strip_name_encoding (
15363 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
15364 char name_buf[512];
15365 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
15366 if (name[0] == '*' || name[0] == '&')
15367 strcpy (name_buf, name+1);
15371 strcpy (name_buf+1, name);
15373 strcpy (tmp_buf, "\n");
15374 strcat (tmp_buf, label);
15375 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15376 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15377 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15378 BRANCH_ISLAND_LINE_NUMBER(branch_island));
15379 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15382 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
15383 strcat (tmp_buf, label);
15384 strcat (tmp_buf, "_pic\n");
15385 strcat (tmp_buf, label);
15386 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
15388 strcat (tmp_buf, "\taddis r11,r11,ha16(");
15389 strcat (tmp_buf, name_buf);
15390 strcat (tmp_buf, " - ");
15391 strcat (tmp_buf, label);
15392 strcat (tmp_buf, "_pic)\n");
15394 strcat (tmp_buf, "\tmtlr r0\n");
15396 strcat (tmp_buf, "\taddi r12,r11,lo16(");
15397 strcat (tmp_buf, name_buf);
15398 strcat (tmp_buf, " - ");
15399 strcat (tmp_buf, label);
15400 strcat (tmp_buf, "_pic)\n");
15402 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
15406 strcat (tmp_buf, ":\nlis r12,hi16(");
15407 strcat (tmp_buf, name_buf);
15408 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
15409 strcat (tmp_buf, name_buf);
15410 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
15412 output_asm_insn (tmp_buf, 0);
15413 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15414 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15415 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15416 BRANCH_ISLAND_LINE_NUMBER (branch_island));
15417 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15420 branch_island_list = 0;
15423 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
15424 already there or not. */
15427 no_previous_def (tree function_name)
15429 tree branch_island;
15430 for (branch_island = branch_island_list;
15432 branch_island = TREE_CHAIN (branch_island))
15433 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15438 /* GET_PREV_LABEL gets the label name from the previous definition of
15442 get_prev_label (tree function_name)
15444 tree branch_island;
15445 for (branch_island = branch_island_list;
15447 branch_island = TREE_CHAIN (branch_island))
15448 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15449 return BRANCH_ISLAND_LABEL_NAME (branch_island);
15453 /* INSN is either a function call or a millicode call. It may have an
15454 unconditional jump in its delay slot.
15456 CALL_DEST is the routine we are calling. */
15459 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
15461 static char buf[256];
15462 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
15463 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
15466 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
15468 if (no_previous_def (funname))
15470 int line_number = 0;
15471 rtx label_rtx = gen_label_rtx ();
15472 char *label_buf, temp_buf[256];
15473 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
15474 CODE_LABEL_NUMBER (label_rtx));
15475 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
15476 labelname = get_identifier (label_buf);
15477 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
15479 line_number = NOTE_LINE_NUMBER (insn);
15480 add_compiler_branch_island (labelname, funname, line_number);
15483 labelname = get_prev_label (funname);
15485 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
15486 instruction will reach 'foo', otherwise link as 'bl L42'".
15487 "L42" should be a 'branch island', that will do a far jump to
15488 'foo'. Branch islands are generated in
15489 macho_branch_islands(). */
15490 sprintf (buf, "jbsr %%z%d,%.246s",
15491 dest_operand_number, IDENTIFIER_POINTER (labelname));
15494 sprintf (buf, "bl %%z%d", dest_operand_number);
15498 #endif /* TARGET_MACHO */
15500 /* Generate PIC and indirect symbol stubs. */
15503 machopic_output_stub (FILE *file, const char *symb, const char *stub)
15505 unsigned int length;
15506 char *symbol_name, *lazy_ptr_name;
15507 char *local_label_0;
15508 static int label = 0;
15510 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
15511 symb = (*targetm.strip_name_encoding) (symb);
15514 length = strlen (symb);
15515 symbol_name = alloca (length + 32);
15516 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
15518 lazy_ptr_name = alloca (length + 32);
15519 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
15522 machopic_picsymbol_stub1_section ();
15524 machopic_symbol_stub1_section ();
15525 fprintf (file, "\t.align 2\n");
15527 fprintf (file, "%s:\n", stub);
15528 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15533 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
15534 sprintf (local_label_0, "\"L%011d$spb\"", label);
15536 fprintf (file, "\tmflr r0\n");
15537 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
15538 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
15539 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
15540 lazy_ptr_name, local_label_0);
15541 fprintf (file, "\tmtlr r0\n");
15542 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
15543 lazy_ptr_name, local_label_0);
15544 fprintf (file, "\tmtctr r12\n");
15545 fprintf (file, "\tbctr\n");
15549 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
15550 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
15551 fprintf (file, "\tmtctr r12\n");
15552 fprintf (file, "\tbctr\n");
15555 machopic_lazy_symbol_ptr_section ();
15556 fprintf (file, "%s:\n", lazy_ptr_name);
15557 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15558 fprintf (file, "\t.long dyld_stub_binding_helper\n");
15561 /* Legitimize PIC addresses. If the address is already
15562 position-independent, we return ORIG. Newly generated
15563 position-independent addresses go into a reg. This is REG if non
15564 zero, otherwise we allocate register(s) as necessary. */
15566 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15569 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
15574 if (reg == NULL && ! reload_in_progress && ! reload_completed)
15575 reg = gen_reg_rtx (Pmode);
15577 if (GET_CODE (orig) == CONST)
15579 if (GET_CODE (XEXP (orig, 0)) == PLUS
15580 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
15583 if (GET_CODE (XEXP (orig, 0)) == PLUS)
15585 /* Use a different reg for the intermediate value, as
15586 it will be marked UNCHANGING. */
15587 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
15590 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
15593 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
15599 if (GET_CODE (offset) == CONST_INT)
15601 if (SMALL_INT (offset))
15602 return plus_constant (base, INTVAL (offset));
15603 else if (! reload_in_progress && ! reload_completed)
15604 offset = force_reg (Pmode, offset);
15607 rtx mem = force_const_mem (Pmode, orig);
15608 return machopic_legitimize_pic_address (mem, Pmode, reg);
15611 return gen_rtx (PLUS, Pmode, base, offset);
15614 /* Fall back on generic machopic code. */
15615 return machopic_legitimize_pic_address (orig, mode, reg);
15618 /* This is just a placeholder to make linking work without having to
15619 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
15620 ever needed for Darwin (not too likely!) this would have to get a
15621 real definition. */
15628 #endif /* TARGET_MACHO */
15631 static unsigned int
15632 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15634 return default_section_type_flags_1 (decl, name, reloc,
15635 flag_pic || DEFAULT_ABI == ABI_AIX);
15638 /* Record an element in the table of global constructors. SYMBOL is
15639 a SYMBOL_REF of the function to be called; PRIORITY is a number
15640 between 0 and MAX_INIT_PRIORITY.
15642 This differs from default_named_section_asm_out_constructor in
15643 that we have special handling for -mrelocatable. */
15646 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15648 const char *section = ".ctors";
15651 if (priority != DEFAULT_INIT_PRIORITY)
15653 sprintf (buf, ".ctors.%.5u",
15654 /* Invert the numbering so the linker puts us in the proper
15655 order; constructors are run from right to left, and the
15656 linker sorts in increasing order. */
15657 MAX_INIT_PRIORITY - priority);
15661 named_section_flags (section, SECTION_WRITE);
15662 assemble_align (POINTER_SIZE);
15664 if (TARGET_RELOCATABLE)
15666 fputs ("\t.long (", asm_out_file);
15667 output_addr_const (asm_out_file, symbol);
15668 fputs (")@fixup\n", asm_out_file);
15671 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15675 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15677 const char *section = ".dtors";
15680 if (priority != DEFAULT_INIT_PRIORITY)
15682 sprintf (buf, ".dtors.%.5u",
15683 /* Invert the numbering so the linker puts us in the proper
15684 order; constructors are run from right to left, and the
15685 linker sorts in increasing order. */
15686 MAX_INIT_PRIORITY - priority);
15690 named_section_flags (section, SECTION_WRITE);
15691 assemble_align (POINTER_SIZE);
15693 if (TARGET_RELOCATABLE)
15695 fputs ("\t.long (", asm_out_file);
15696 output_addr_const (asm_out_file, symbol);
15697 fputs (")@fixup\n", asm_out_file);
15700 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15704 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15708 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15709 ASM_OUTPUT_LABEL (file, name);
15710 fputs (DOUBLE_INT_ASM_OP, file);
15712 assemble_name (file, name);
15713 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15714 assemble_name (file, name);
15715 fputs (",24\n\t.type\t.", file);
15716 assemble_name (file, name);
15717 fputs (",@function\n", file);
15718 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15720 fputs ("\t.globl\t.", file);
15721 assemble_name (file, name);
15724 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15726 ASM_OUTPUT_LABEL (file, name);
15730 if (TARGET_RELOCATABLE
15731 && (get_pool_size () != 0 || current_function_profile)
15736 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15738 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15739 fprintf (file, "\t.long ");
15740 assemble_name (file, buf);
15742 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15743 assemble_name (file, buf);
15747 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15748 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15750 if (DEFAULT_ABI == ABI_AIX)
15752 const char *desc_name, *orig_name;
15754 orig_name = (*targetm.strip_name_encoding) (name);
15755 desc_name = orig_name;
15756 while (*desc_name == '.')
15759 if (TREE_PUBLIC (decl))
15760 fprintf (file, "\t.globl %s\n", desc_name);
15762 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15763 fprintf (file, "%s:\n", desc_name);
15764 fprintf (file, "\t.long %s\n", orig_name);
15765 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15766 if (DEFAULT_ABI == ABI_AIX)
15767 fputs ("\t.long 0\n", file);
15768 fprintf (file, "\t.previous\n");
15770 ASM_OUTPUT_LABEL (file, name);
15774 rs6000_elf_end_indicate_exec_stack (void)
15777 file_end_indicate_exec_stack ();
15783 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15785 fputs (GLOBAL_ASM_OP, stream);
15786 RS6000_OUTPUT_BASENAME (stream, name);
15787 putc ('\n', stream);
15791 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15794 static const char * const suffix[3] = { "PR", "RO", "RW" };
15796 if (flags & SECTION_CODE)
15798 else if (flags & SECTION_WRITE)
15803 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15804 (flags & SECTION_CODE) ? "." : "",
15805 name, suffix[smclass], flags & SECTION_ENTSIZE);
15809 rs6000_xcoff_select_section (tree decl, int reloc,
15810 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15812 if (decl_readonly_section_1 (decl, reloc, 1))
15814 if (TREE_PUBLIC (decl))
15815 read_only_data_section ();
15817 read_only_private_data_section ();
15821 if (TREE_PUBLIC (decl))
15824 private_data_section ();
15829 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15833 /* Use select_section for private and uninitialized data. */
15834 if (!TREE_PUBLIC (decl)
15835 || DECL_COMMON (decl)
15836 || DECL_INITIAL (decl) == NULL_TREE
15837 || DECL_INITIAL (decl) == error_mark_node
15838 || (flag_zero_initialized_in_bss
15839 && initializer_zerop (DECL_INITIAL (decl))))
15842 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15843 name = (*targetm.strip_name_encoding) (name);
15844 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15847 /* Select section for constant in constant pool.
15849 On RS/6000, all constants are in the private read-only data area.
15850 However, if this is being placed in the TOC it must be output as a
15854 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15855 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15857 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15860 read_only_private_data_section ();
15863 /* Remove any trailing [DS] or the like from the symbol name. */
15865 static const char *
15866 rs6000_xcoff_strip_name_encoding (const char *name)
15871 len = strlen (name);
15872 if (name[len - 1] == ']')
15873 return ggc_alloc_string (name, len - 4);
15878 /* Section attributes. AIX is always PIC. */
15880 static unsigned int
15881 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15883 unsigned int align;
15884 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15886 /* Align to at least UNIT size. */
15887 if (flags & SECTION_CODE)
15888 align = MIN_UNITS_PER_WORD;
15890 /* Increase alignment of large objects if not already stricter. */
15891 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15892 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15893 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15895 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15898 /* Output at beginning of assembler file.
15900 Initialize the section names for the RS/6000 at this point.
15902 Specify filename, including full path, to assembler.
15904 We want to go into the TOC section so at least one .toc will be emitted.
15905 Also, in order to output proper .bs/.es pairs, we need at least one static
15906 [RW] section emitted.
15908 Finally, declare mcount when profiling to make the assembler happy. */
15911 rs6000_xcoff_file_start (void)
15913 rs6000_gen_section_name (&xcoff_bss_section_name,
15914 main_input_filename, ".bss_");
15915 rs6000_gen_section_name (&xcoff_private_data_section_name,
15916 main_input_filename, ".rw_");
15917 rs6000_gen_section_name (&xcoff_read_only_section_name,
15918 main_input_filename, ".ro_");
15920 fputs ("\t.file\t", asm_out_file);
15921 output_quoted_string (asm_out_file, main_input_filename);
15922 fputc ('\n', asm_out_file);
15924 if (write_symbols != NO_DEBUG)
15925 private_data_section ();
15928 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15929 rs6000_file_start ();
15932 /* Output at end of assembler file.
15933 On the RS/6000, referencing data should automatically pull in text. */
15936 rs6000_xcoff_file_end (void)
15939 fputs ("_section_.text:\n", asm_out_file);
15941 fputs (TARGET_32BIT
15942 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15945 #endif /* TARGET_XCOFF */
15948 /* Cross-module name binding. Darwin does not support overriding
15949 functions at dynamic-link time. */
15952 rs6000_binds_local_p (tree decl)
15954 return default_binds_local_p_1 (decl, 0);
15958 /* Compute a (partial) cost for rtx X. Return true if the complete
15959 cost has been computed, and false if subexpressions should be
15960 scanned. In either case, *TOTAL contains the cost result. */
15963 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15968 /* On the RS/6000, if it is valid in the insn, it is free.
15969 So this always returns 0. */
15980 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15981 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15982 + 0x8000) >= 0x10000)
15983 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15984 ? COSTS_N_INSNS (2)
15985 : COSTS_N_INSNS (1));
15991 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15992 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15993 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15994 ? COSTS_N_INSNS (2)
15995 : COSTS_N_INSNS (1));
16001 *total = COSTS_N_INSNS (2);
16004 switch (rs6000_cpu)
16006 case PROCESSOR_RIOS1:
16007 case PROCESSOR_PPC405:
16008 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16009 ? COSTS_N_INSNS (5)
16010 : (INTVAL (XEXP (x, 1)) >= -256
16011 && INTVAL (XEXP (x, 1)) <= 255)
16012 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
16015 case PROCESSOR_PPC440:
16016 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16017 ? COSTS_N_INSNS (3)
16018 : COSTS_N_INSNS (2));
16021 case PROCESSOR_RS64A:
16022 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16023 ? GET_MODE (XEXP (x, 1)) != DImode
16024 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
16025 : (INTVAL (XEXP (x, 1)) >= -256
16026 && INTVAL (XEXP (x, 1)) <= 255)
16027 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
16030 case PROCESSOR_RIOS2:
16031 case PROCESSOR_MPCCORE:
16032 case PROCESSOR_PPC604e:
16033 *total = COSTS_N_INSNS (2);
16036 case PROCESSOR_PPC601:
16037 *total = COSTS_N_INSNS (5);
16040 case PROCESSOR_PPC603:
16041 case PROCESSOR_PPC7400:
16042 case PROCESSOR_PPC750:
16043 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16044 ? COSTS_N_INSNS (5)
16045 : (INTVAL (XEXP (x, 1)) >= -256
16046 && INTVAL (XEXP (x, 1)) <= 255)
16047 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
16050 case PROCESSOR_PPC7450:
16051 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16052 ? COSTS_N_INSNS (4)
16053 : COSTS_N_INSNS (3));
16056 case PROCESSOR_PPC403:
16057 case PROCESSOR_PPC604:
16058 case PROCESSOR_PPC8540:
16059 *total = COSTS_N_INSNS (4);
16062 case PROCESSOR_PPC620:
16063 case PROCESSOR_PPC630:
16064 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16065 ? GET_MODE (XEXP (x, 1)) != DImode
16066 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
16067 : (INTVAL (XEXP (x, 1)) >= -256
16068 && INTVAL (XEXP (x, 1)) <= 255)
16069 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
16072 case PROCESSOR_POWER4:
16073 case PROCESSOR_POWER5:
16074 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16075 ? GET_MODE (XEXP (x, 1)) != DImode
16076 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
16077 : COSTS_N_INSNS (2));
16086 if (GET_CODE (XEXP (x, 1)) == CONST_INT
16087 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
16089 *total = COSTS_N_INSNS (2);
16096 switch (rs6000_cpu)
16098 case PROCESSOR_RIOS1:
16099 *total = COSTS_N_INSNS (19);
16102 case PROCESSOR_RIOS2:
16103 *total = COSTS_N_INSNS (13);
16106 case PROCESSOR_RS64A:
16107 *total = (GET_MODE (XEXP (x, 1)) != DImode
16108 ? COSTS_N_INSNS (65)
16109 : COSTS_N_INSNS (67));
16112 case PROCESSOR_MPCCORE:
16113 *total = COSTS_N_INSNS (6);
16116 case PROCESSOR_PPC403:
16117 *total = COSTS_N_INSNS (33);
16120 case PROCESSOR_PPC405:
16121 *total = COSTS_N_INSNS (35);
16124 case PROCESSOR_PPC440:
16125 *total = COSTS_N_INSNS (34);
16128 case PROCESSOR_PPC601:
16129 *total = COSTS_N_INSNS (36);
16132 case PROCESSOR_PPC603:
16133 *total = COSTS_N_INSNS (37);
16136 case PROCESSOR_PPC604:
16137 case PROCESSOR_PPC604e:
16138 *total = COSTS_N_INSNS (20);
16141 case PROCESSOR_PPC620:
16142 case PROCESSOR_PPC630:
16143 *total = (GET_MODE (XEXP (x, 1)) != DImode
16144 ? COSTS_N_INSNS (21)
16145 : COSTS_N_INSNS (37));
16148 case PROCESSOR_PPC750:
16149 case PROCESSOR_PPC8540:
16150 case PROCESSOR_PPC7400:
16151 *total = COSTS_N_INSNS (19);
16154 case PROCESSOR_PPC7450:
16155 *total = COSTS_N_INSNS (23);
16158 case PROCESSOR_POWER4:
16159 case PROCESSOR_POWER5:
16160 *total = (GET_MODE (XEXP (x, 1)) != DImode
16161 ? COSTS_N_INSNS (18)
16162 : COSTS_N_INSNS (34));
16170 *total = COSTS_N_INSNS (4);
16174 /* MEM should be slightly more expensive than (plus (reg) (const)). */
16183 /* A C expression returning the cost of moving data from a register of class
16184 CLASS1 to one of CLASS2. */
16187 rs6000_register_move_cost (enum machine_mode mode,
16188 enum reg_class from, enum reg_class to)
16190 /* Moves from/to GENERAL_REGS. */
16191 if (reg_classes_intersect_p (to, GENERAL_REGS)
16192 || reg_classes_intersect_p (from, GENERAL_REGS))
16194 if (! reg_classes_intersect_p (to, GENERAL_REGS))
16197 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
16198 return (rs6000_memory_move_cost (mode, from, 0)
16199 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
16201 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
16202 else if (from == CR_REGS)
16206 /* A move will cost one instruction per GPR moved. */
16207 return 2 * HARD_REGNO_NREGS (0, mode);
16210 /* Moving between two similar registers is just one instruction. */
16211 else if (reg_classes_intersect_p (to, from))
16212 return mode == TFmode ? 4 : 2;
16214 /* Everything else has to go through GENERAL_REGS. */
16216 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
16217 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
16220 /* A C expressions returning the cost of moving data of MODE from a register to
16224 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16225 int in ATTRIBUTE_UNUSED)
16227 if (reg_classes_intersect_p (class, GENERAL_REGS))
16228 return 4 * HARD_REGNO_NREGS (0, mode);
16229 else if (reg_classes_intersect_p (class, FLOAT_REGS))
16230 return 4 * HARD_REGNO_NREGS (32, mode);
16231 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16232 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16234 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16237 /* Return an RTX representing where to find the function value of a
16238 function returning MODE. */
16240 rs6000_complex_function_value (enum machine_mode mode)
16242 unsigned int regno;
16244 enum machine_mode inner = GET_MODE_INNER (mode);
16245 unsigned int inner_bytes = GET_MODE_SIZE (inner);
16247 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
16248 regno = FP_ARG_RETURN;
16251 regno = GP_ARG_RETURN;
16253 /* 32-bit is OK since it'll go in r3/r4. */
16254 if (TARGET_32BIT && inner_bytes >= 4)
16255 return gen_rtx_REG (mode, regno);
16258 if (inner_bytes >= 8)
16259 return gen_rtx_REG (mode, regno);
16261 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
16263 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
16264 GEN_INT (inner_bytes));
16265 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
16268 /* Define how to find the value returned by a function.
16269 VALTYPE is the data type of the value (as a tree).
16270 If the precise function being called is known, FUNC is its FUNCTION_DECL;
16271 otherwise, FUNC is 0.
16273 On the SPE, both FPs and vectors are returned in r3.
16275 On RS/6000 an integer value is in r3 and a floating-point value is in
16276 fp1, unless -msoft-float. */
16279 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
16281 enum machine_mode mode;
16282 unsigned int regno;
16284 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
16286 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
16287 return gen_rtx_PARALLEL (DImode,
16289 gen_rtx_EXPR_LIST (VOIDmode,
16290 gen_rtx_REG (SImode, GP_ARG_RETURN),
16292 gen_rtx_EXPR_LIST (VOIDmode,
16293 gen_rtx_REG (SImode,
16294 GP_ARG_RETURN + 1),
16298 if ((INTEGRAL_TYPE_P (valtype)
16299 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
16300 || POINTER_TYPE_P (valtype))
16301 mode = TARGET_32BIT ? SImode : DImode;
16303 mode = TYPE_MODE (valtype);
16305 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
16306 regno = FP_ARG_RETURN;
16307 else if (TREE_CODE (valtype) == COMPLEX_TYPE
16308 && targetm.calls.split_complex_arg)
16309 return rs6000_complex_function_value (mode);
16310 else if (TREE_CODE (valtype) == VECTOR_TYPE
16311 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16312 regno = ALTIVEC_ARG_RETURN;
16314 regno = GP_ARG_RETURN;
16316 return gen_rtx_REG (mode, regno);
16319 /* Define how to find the value returned by a library function
16320 assuming the value has mode MODE. */
16322 rs6000_libcall_value (enum machine_mode mode)
16324 unsigned int regno;
16326 if (GET_MODE_CLASS (mode) == MODE_FLOAT
16327 && TARGET_HARD_FLOAT && TARGET_FPRS)
16328 regno = FP_ARG_RETURN;
16329 else if (ALTIVEC_VECTOR_MODE (mode)
16330 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16331 regno = ALTIVEC_ARG_RETURN;
16332 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16333 return rs6000_complex_function_value (mode);
16335 regno = GP_ARG_RETURN;
16337 return gen_rtx_REG (mode, regno);
16340 /* Define the offset between two registers, FROM to be eliminated and its
16341 replacement TO, at the start of a routine. */
16343 rs6000_initial_elimination_offset (int from, int to)
16345 rs6000_stack_t *info = rs6000_stack_info ();
16346 HOST_WIDE_INT offset;
16348 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16349 offset = info->push_p ? 0 : -info->total_size;
16350 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16351 offset = info->total_size;
16352 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16353 offset = info->push_p ? info->total_size : 0;
16354 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16362 /* Return true if TYPE is of type __ev64_opaque__. */
16365 is_ev64_opaque_type (tree type)
16368 && (type == opaque_V2SI_type_node
16369 || type == opaque_V2SF_type_node
16370 || type == opaque_p_V2SI_type_node));
16374 rs6000_dwarf_register_span (rtx reg)
16378 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16381 regno = REGNO (reg);
16383 /* The duality of the SPE register size wreaks all kinds of havoc.
16384 This is a way of distinguishing r0 in 32-bits from r0 in
16387 gen_rtx_PARALLEL (VOIDmode,
16390 gen_rtx_REG (SImode, regno + 1200),
16391 gen_rtx_REG (SImode, regno))
16393 gen_rtx_REG (SImode, regno),
16394 gen_rtx_REG (SImode, regno + 1200)));
16397 /* Map internal gcc register numbers to DWARF2 register numbers. */
16400 rs6000_dbx_register_number (unsigned int regno)
16402 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16404 if (regno == MQ_REGNO)
16406 if (regno == LINK_REGISTER_REGNUM)
16408 if (regno == COUNT_REGISTER_REGNUM)
16410 if (CR_REGNO_P (regno))
16411 return regno - CR0_REGNO + 86;
16412 if (regno == XER_REGNO)
16414 if (ALTIVEC_REGNO_P (regno))
16415 return regno - FIRST_ALTIVEC_REGNO + 1124;
16416 if (regno == VRSAVE_REGNO)
16418 if (regno == VSCR_REGNO)
16420 if (regno == SPE_ACC_REGNO)
16422 if (regno == SPEFSCR_REGNO)
16424 /* SPE high reg number. We get these values of regno from
16425 rs6000_dwarf_register_span. */
16426 if (regno >= 1200 && regno < 1232)
16432 #include "gt-rs6000.h"