1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
39 #include "basic-block.h"
/* Default address-side-effect codes for push/pop: on a downward-growing
   stack a push pre-decrements and a pop post-increments the stack pointer;
   on an upward-growing stack the directions are reversed.
   NOTE(review): the #else/#endif lines are elided in this listing.  */
43 #ifndef STACK_PUSH_CODE
44 #ifdef STACK_GROWS_DOWNWARD
45 #define STACK_PUSH_CODE PRE_DEC
47 #define STACK_PUSH_CODE PRE_INC
51 #ifndef STACK_POP_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_POP_CODE POST_INC
55 #define STACK_POP_CODE POST_DEC
59 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
60 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
61 static void validate_replace_src_1 PARAMS ((rtx *, void *));
62 static rtx split_insn PARAMS ((rtx));
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in regclass.c and final.c and reload.c.
70 init_recog and init_recog_no_volatile are responsible for setting this. */
/* Shared operand data for the most recently extracted insn
   (presumably filled in by extract_insn -- TODO confirm; its
   definition is elided from this listing).  */
74 struct recog_data recog_data;
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
80 /* On return from `constrain_operands', indicate which alternative
83 int which_alternative;
/* check_asm_operands tests `which_alternative >= 0' after calling
   constrain_operands, i.e. nonnegative means some alternative matched.  */
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
91 /* Initialize data used by the function `recog'.
92 This must be called once in the compilation of a function
93 before any insn recognition may be done in the function. */
96 init_recog_no_volatile ()
/* NOTE(review): body elided in this listing; per the file comment above
   the declarations, this presumably clears `volatile_ok' -- confirm.  */
107 /* Try recognizing the instruction INSN,
108 and return the code number that results.
109 Remember the code so that repeated calls do not
110 need to spend the time for actual rerecognition.
112 This function is the normal interface to instruction recognition.
113 The automatically-generated function `recog' is normally called
114 through this one. (The only exception is in combine.c.) */
117 recog_memoized_1 (insn)
/* INSN_CODE < 0 marks an insn whose code has not yet been computed;
   run the recognizer once and cache the result in the insn.  */
120 if (INSN_CODE (insn) < 0)
121 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
122 return INSN_CODE (insn);
125 /* Check that X is an insn-body for an `asm' with operands
126 and that the operands mentioned in it are legitimate. */
129 check_asm_operands (x)
134 const char **constraints;
137 /* Post-reload, be more strict with things. */
138 if (reload_completed)
140 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
141 extract_insn (make_insn_raw (x));
142 constrain_operands (1);
143 return which_alternative >= 0;
146 noperands = asm_noperands (x);
152 operands = (rtx *) alloca (noperands * sizeof (rtx));
153 constraints = (const char **) alloca (noperands * sizeof (char *));
155 decode_asm_operands (x, operands, NULL, constraints, NULL);
157 for (i = 0; i < noperands; i++)
159 const char *c = constraints[i];
/* A lone digit is a matching constraint: substitute the constraint
   string of the operand it refers to before validating.  */
162 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
163 c = constraints[c[0] - '0'];
165 if (! asm_operand_ok (operands[i], c))
172 /* Static data for the next two routines. */
/* Stack of pending, not-yet-validated substitutions shared by
   validate_change, apply_change_group and cancel_changes.
   `changes_allocated' is the capacity; `num_changes' the fill level.  */
174 typedef struct change_t
182 static change_t *changes;
183 static int changes_allocated;
185 static int num_changes = 0;
187 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
188 at which NEW will be placed. If OBJECT is zero, no validation is done,
189 the change is simply made.
191 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
192 will be called with the address and mode as parameters. If OBJECT is
193 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
196 IN_GROUP is non-zero if this is part of a group of changes that must be
197 performed as a group. In that case, the changes will be stored. The
198 function `apply_change_group' will validate and apply the changes.
200 If IN_GROUP is zero, this is a single change. Try to recognize the insn
201 or validate the memory reference with the change applied. If the result
202 is not valid for the machine, suppress the change and return zero.
203 Otherwise, perform the change and return 1. */
206 validate_change (object, loc, new, in_group)
/* No-op substitution (same rtx, or rtx_equal_p): nothing to record.  */
214 if (old == new || rtx_equal_p (old, new))
/* A non-group change while a group is pending would corrupt the stack.  */
217 if (in_group == 0 && num_changes != 0)
222 /* Save the information describing this change. */
223 if (num_changes >= changes_allocated)
225 if (changes_allocated == 0)
226 /* This value allows for repeated substitutions inside complex
227 indexed addresses, or changes in up to 5 insns. */
228 changes_allocated = MAX_RECOG_OPERANDS * 5;
230 changes_allocated *= 2;
233 (change_t*) xrealloc (changes,
234 sizeof (change_t) * changes_allocated);
237 changes[num_changes].object = object;
238 changes[num_changes].loc = loc;
239 changes[num_changes].old = old;
241 if (object && GET_CODE (object) != MEM)
243 /* Set INSN_CODE to force rerecognition of insn. Save old code in
245 changes[num_changes].old_code = INSN_CODE (object);
246 INSN_CODE (object) = -1;
251 /* If we are making a group of changes, return 1. Otherwise, validate the
252 change group we made. */
257 return apply_change_group ();
260 /* This subroutine of apply_change_group verifies whether the changes to INSN
261 were valid; i.e. whether INSN can still be recognized. */
264 insn_invalid_p (insn)
267 rtx pat = PATTERN (insn);
268 int num_clobbers = 0;
269 /* If we are before reload and the pattern is a SET, see if we can add
271 int icode = recog (pat, insn,
272 (GET_CODE (pat) == SET
273 && ! reload_completed && ! reload_in_progress)
274 ? &num_clobbers : 0);
275 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
278 /* If this is an asm and the operand aren't legal, then fail. Likewise if
279 this is not an asm and the insn wasn't recognized. */
280 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
281 || (!is_asm && icode < 0))
284 /* If we have to add CLOBBERs, fail if we have to add ones that reference
285 hard registers since our callers can't know if they are live or not.
286 Otherwise, add them. */
287 if (num_clobbers > 0)
291 if (added_clobbers_hard_reg_p (icode))
/* Wrap the original pattern in a PARALLEL with room for the clobbers.  */
294 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
295 XVECEXP (newpat, 0, 0) = pat;
296 add_clobbers (newpat, icode);
297 PATTERN (insn) = pat = newpat;
300 /* After reload, verify that all constraints are satisfied. */
301 if (reload_completed)
305 if (! constrain_operands (1))
/* Recognition succeeded: record the insn code.  */
309 INSN_CODE (insn) = icode;
313 /* Apply a group of changes previously issued with `validate_change'.
314 Return 1 if all changes are valid, zero otherwise. */
317 apply_change_group ()
320 rtx last_validated = NULL_RTX;
322 /* The changes have been applied and all INSN_CODEs have been reset to force
325 The changes are valid if we aren't given an object, or if we are
326 given a MEM and it still is a valid address, or if this is in insn
327 and it is recognized. In the latter case, if reload has completed,
328 we also require that the operands meet the constraints for
331 for (i = 0; i < num_changes; i++)
333 rtx object = changes[i].object;
335 /* if there is no object to test or if it is the same as the one we
336 already tested, ignore it. */
337 if (object == 0 || object == last_validated)
340 if (GET_CODE (object) == MEM)
342 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
345 else if (insn_invalid_p (object))
347 rtx pat = PATTERN (object);
349 /* Perhaps we couldn't recognize the insn because there were
350 extra CLOBBERs at the end. If so, try to re-recognize
351 without the last CLOBBER (later iterations will cause each of
352 them to be eliminated, in turn). But don't do this if we
353 have an ASM_OPERAND. */
354 if (GET_CODE (pat) == PARALLEL
355 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
356 && asm_noperands (PATTERN (object)) < 0)
/* A 2-element PARALLEL minus its CLOBBER degenerates to the bare body.  */
360 if (XVECLEN (pat, 0) == 2)
361 newpat = XVECEXP (pat, 0, 0);
367 = gen_rtx_PARALLEL (VOIDmode,
368 rtvec_alloc (XVECLEN (pat, 0) - 1));
369 for (j = 0; j < XVECLEN (newpat, 0); j++)
370 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
373 /* Add a new change to this group to replace the pattern
374 with this new pattern. Then consider this change
375 as having succeeded. The change we added will
376 cause the entire call to fail if things remain invalid.
378 Note that this can lose if a later change than the one
379 we are processing specified &XVECEXP (PATTERN (object), 0, X)
380 but this shouldn't occur. */
382 validate_change (object, &PATTERN (object), newpat, 1);
385 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
386 /* If this insn is a CLOBBER or USE, it is always valid, but is
392 last_validated = object;
/* Loop ran to completion <=> every change validated.  */
395 if (i == num_changes)
407 /* Return the number of changes so far in the current group. */
410 num_validated_changes ()
/* NOTE(review): body elided in this listing; presumably returns
   `num_changes' -- confirm against the full source.  */
415 /* Retract the changes numbered NUM and up. */
/* NOTE(review): the function header line is elided in this listing;
   by the comment above this is the cancel/retract routine for the
   pending-change stack.  */
423 /* Back out all the changes. Do this in the opposite order in which
425 for (i = num_changes - 1; i >= num; i--)
427 *changes[i].loc = changes[i].old;
/* For insns (non-MEM objects) also restore the saved INSN_CODE that
   validate_change reset to -1.  */
428 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
429 INSN_CODE (changes[i].object) = changes[i].old_code;
434 /* Replace every occurrence of FROM in X with TO. Mark each change with
435 validate_change passing OBJECT. */
438 validate_replace_rtx_1 (loc, from, to, object)
440 rtx from, to, object;
446 enum machine_mode op0_mode = VOIDmode;
447 int prev_changes = num_changes;
454 fmt = GET_RTX_FORMAT (code);
456 op0_mode = GET_MODE (XEXP (x, 0));
458 /* X matches FROM if it is the same rtx or they are both referring to the
459 same register in the same mode. Avoid calling rtx_equal_p unless the
460 operands look similar. */
463 || (GET_CODE (x) == REG && GET_CODE (from) == REG
464 && GET_MODE (x) == GET_MODE (from)
465 && REGNO (x) == REGNO (from))
466 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
467 && rtx_equal_p (x, from)))
469 validate_change (object, loc, to, 1);
473 /* Call ourself recursively to perform the replacements. */
475 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
478 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
479 else if (fmt[i] == 'E')
480 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
481 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
484 /* If we didn't substitute, there is nothing more to do. */
485 if (num_changes == prev_changes)
488 /* Allow substituted expression to have different mode. This is used by
489 regmove to change mode of pseudo register. */
490 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
491 op0_mode = GET_MODE (XEXP (x, 0));
493 /* Do changes needed to keep rtx consistent. Don't do any other
494 simplifications, as it is not our job. */
/* Canonicalize operand order of commutative ('c') and comparison ('<')
   codes; a swapped comparison must also have its condition reversed.  */
496 if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
497 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
499 validate_change (object, loc,
500 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
501 : swap_condition (code),
502 GET_MODE (x), XEXP (x, 1),
511 /* If we have a PLUS whose second operand is now a CONST_INT, use
512 plus_constant to try to simplify it.
513 ??? We may want later to remove this, once simplification is
514 separated from this function. */
515 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
516 validate_change (object, loc,
518 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
/* MINUS of a constant is rewritten as PLUS of its negation.  */
521 if (GET_CODE (XEXP (x, 1)) == CONST_INT
522 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
523 validate_change (object, loc,
525 (PLUS, GET_MODE (x), XEXP (x, 0),
526 simplify_gen_unary (NEG,
527 GET_MODE (x), XEXP (x, 1),
532 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
534 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
536 /* If any of the above failed, substitute in something that
537 we know won't be recognized. */
539 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
540 validate_change (object, loc, new, 1);
544 /* All subregs possible to simplify should be simplified. */
545 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
548 /* Subregs of VOIDmode operands are incorrect. */
549 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
550 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
552 validate_change (object, loc, new, 1);
556 /* If we are replacing a register with memory, try to change the memory
557 to be the mode required for memory in extract operations (this isn't
558 likely to be an insertion operation; if it was, nothing bad will
559 happen, we might just fail in some cases). */
561 if (GET_CODE (XEXP (x, 0)) == MEM
562 && GET_CODE (XEXP (x, 1)) == CONST_INT
563 && GET_CODE (XEXP (x, 2)) == CONST_INT
564 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
565 && !MEM_VOLATILE_P (XEXP (x, 0)))
567 enum machine_mode wanted_mode = VOIDmode;
568 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
569 int pos = INTVAL (XEXP (x, 2));
571 if (GET_CODE (x) == ZERO_EXTRACT)
573 enum machine_mode new_mode
574 = mode_for_extraction (EP_extzv, 1);
575 if (new_mode != MAX_MACHINE_MODE)
576 wanted_mode = new_mode;
578 else if (GET_CODE (x) == SIGN_EXTRACT)
580 enum machine_mode new_mode
581 = mode_for_extraction (EP_extv, 1);
582 if (new_mode != MAX_MACHINE_MODE)
583 wanted_mode = new_mode;
586 /* If we have a narrower mode, we can do something. */
587 if (wanted_mode != VOIDmode
588 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
590 int offset = pos / BITS_PER_UNIT;
593 /* If the bytes and bits are counted differently, we
594 must adjust the offset. */
595 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
597 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
/* The bit position is now relative to the narrowed (offset) MEM.  */
600 pos %= GET_MODE_BITSIZE (wanted_mode);
602 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
604 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
605 validate_change (object, &XEXP (x, 0), newmem, 1);
616 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
617 with TO. After all changes have been made, validate by seeing
618 if INSN is still valid. */
621 validate_replace_rtx_subexp (from, to, insn, loc)
622 rtx from, to, insn, *loc;
/* Queue the substitutions as a group, then validate/apply them all.  */
624 validate_replace_rtx_1 (loc, from, to, insn);
625 return apply_change_group ();
628 /* Try replacing every occurrence of FROM in INSN with TO. After all
629 changes have been made, validate by seeing if INSN is still valid. */
632 validate_replace_rtx (from, to, insn)
/* Same as validate_replace_rtx_subexp but over the whole pattern.  */
635 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
636 return apply_change_group ();
639 /* Try replacing every occurrence of FROM in INSN with TO. */
642 validate_replace_rtx_group (from, to, insn)
/* Unlike validate_replace_rtx, this leaves the changes queued: the
   caller is expected to invoke apply_change_group (or cancel) later.  */
645 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
648 /* Function called by note_uses to replace used subexpressions. */
/* Closure passed through note_uses' opaque data pointer to
   validate_replace_src_1.  */
649 struct validate_replace_src_data
651 rtx from; /* Old RTX */
652 rtx to; /* New RTX */
653 rtx insn; /* Insn in which substitution is occurring. */
/* note_uses callback: unpack the validate_replace_src_data closure and
   perform the FROM->TO substitution in the used subexpression *X.  */
657 validate_replace_src_1 (x, data)
661 struct validate_replace_src_data *d
662 = (struct validate_replace_src_data *) data;
664 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
667 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
668 SET_DESTs. After all changes have been made, validate by seeing if
669 INSN is still valid. */
672 validate_replace_src (from, to, insn)
675 struct validate_replace_src_data d;
/* note_uses walks only the *used* subexpressions of the pattern, which
   is what keeps SET_DESTs out of the substitution.  */
680 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
681 return apply_change_group ();
685 /* Return 1 if the insn using CC0 set by INSN does not contain
686 any ordered tests applied to the condition codes.
687 EQ and NE tests do not count. */
690 next_insn_tests_no_inequality (insn)
693 rtx next = next_cc0_user (insn);
695 /* If there is no next insn, we have to take the conservative choice. */
/* The user must be a real insn (plain, jump or call) and its pattern
   must contain no ordered (<, >, <=, >=) comparisons.  */
699 return ((GET_CODE (next) == JUMP_INSN
700 || GET_CODE (next) == INSN
701 || GET_CODE (next) == CALL_INSN)
702 && ! inequality_comparisons_p (PATTERN (next)));
/* Dead code: deliberately disabled with #if 0 (see the rationale in the
   comment below); kept for reference only.  */
705 #if 0 /* This is useless since the insn that sets the cc's
706 must be followed immediately by the use of them. */
707 /* Return 1 if the CC value set up by INSN is not used. */
710 next_insns_test_no_inequality (insn)
713 rtx next = NEXT_INSN (insn);
715 for (; next != 0; next = NEXT_INSN (next))
717 if (GET_CODE (next) == CODE_LABEL
718 || GET_CODE (next) == BARRIER)
720 if (GET_CODE (next) == NOTE)
722 if (inequality_comparisons_p (PATTERN (next)))
724 if (sets_cc0_p (PATTERN (next)) == 1)
726 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
734 /* This is used by find_single_use to locate an rtx that contains exactly one
735 use of DEST, which is typically either a REG or CC0. It returns a
736 pointer to the innermost rtx expression containing DEST. Appearances of
737 DEST that are being used to totally replace it are not counted. */
740 find_single_use_1 (dest, loc)
745 enum rtx_code code = GET_CODE (x);
763 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
764 of a REG that occupies all of the REG, the insn uses DEST if
765 it is mentioned in the destination or the source. Otherwise, we
766 need just check the source. */
767 if (GET_CODE (SET_DEST (x)) != CC0
768 && GET_CODE (SET_DEST (x)) != PC
769 && GET_CODE (SET_DEST (x)) != REG
770 && ! (GET_CODE (SET_DEST (x)) == SUBREG
771 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
/* The SUBREG covers the whole REG iff both round up to the same number
   of words.  */
772 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
773 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
774 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
775 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
778 return find_single_use_1 (dest, &SET_SRC (x));
782 return find_single_use_1 (dest, &XEXP (x, 0));
788 /* If it wasn't one of the common cases above, check each expression and
789 vector of this code. Look for a unique usage of DEST. */
791 fmt = GET_RTX_FORMAT (code);
792 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
796 if (dest == XEXP (x, i)
797 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
798 && REGNO (dest) == REGNO (XEXP (x, i))))
801 this_result = find_single_use_1 (dest, &XEXP (x, i));
804 result = this_result;
805 else if (this_result)
806 /* Duplicate usage. */
809 else if (fmt[i] == 'E')
813 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
815 if (XVECEXP (x, i, j) == dest
816 || (GET_CODE (dest) == REG
817 && GET_CODE (XVECEXP (x, i, j)) == REG
818 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
821 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
/* A second hit in the vector elements likewise means DEST is not
   used exactly once.  */
824 result = this_result;
825 else if (this_result)
834 /* See if DEST, produced in INSN, is used only a single time in the
835 sequel. If so, return a pointer to the innermost rtx expression in which
838 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
840 This routine will return usually zero either before flow is called (because
841 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
842 note can't be trusted).
844 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
845 care about REG_DEAD notes or LOG_LINKS.
847 Otherwise, we find the single use by finding an insn that has a
848 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
849 only referenced once in that insn, we know that it must be the first
850 and last insn referencing DEST. */
853 find_single_use (dest, insn, ploc)
/* CC0 case: only the immediately following insn can use the CC value.  */
865 next = NEXT_INSN (insn);
867 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
870 result = find_single_use_1 (dest, &PATTERN (next));
/* General case only works for pseudo REGs before/outside reload.  */
877 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
880 for (next = next_nonnote_insn (insn);
881 next != 0 && GET_CODE (next) != CODE_LABEL;
882 next = next_nonnote_insn (next))
883 if (INSN_P (next) && dead_or_set_p (next, dest))
885 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
886 if (XEXP (link, 0) == insn)
891 result = find_single_use_1 (dest, &PATTERN (next));
901 /* Return 1 if OP is a valid general operand for machine mode MODE.
902 This is either a register reference, a memory reference,
903 or a constant. In the case of a memory reference, the address
904 is checked for general validity for the target machine.
906 Register and memory references must have mode MODE in order to be valid,
907 but some constants have no machine mode and are valid for any mode.
909 If MODE is VOIDmode, OP is checked for validity for whatever mode
912 The main use of this function is as a predicate in match_operand
913 expressions in the machine description.
915 For an explanation of this function's behavior for registers of
916 class NO_REGS, see the comment for `register_operand'. */
919 general_operand (op, mode)
921 enum machine_mode mode;
923 enum rtx_code code = GET_CODE (op);
925 if (mode == VOIDmode)
926 mode = GET_MODE (op);
928 /* Don't accept CONST_INT or anything similar
929 if the caller wants something floating. */
930 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
931 && GET_MODE_CLASS (mode) != MODE_INT
932 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Reject a CONST_INT whose value does not survive truncation to MODE,
   i.e. one not already in canonical (sign-extended) form for MODE.  */
935 if (GET_CODE (op) == CONST_INT
936 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
940 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
942 #ifdef LEGITIMATE_PIC_OPERAND_P
943 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
945 && LEGITIMATE_CONSTANT_P (op));
947 /* Except for certain constants with VOIDmode, already checked for,
948 OP's mode must match MODE if MODE specifies a mode. */
950 if (GET_MODE (op) != mode)
955 rtx sub = SUBREG_REG (op);
957 #ifdef INSN_SCHEDULING
958 /* On machines that have insn scheduling, we want all memory
959 reference to be explicit, so outlaw paradoxical SUBREGs. */
960 if (GET_CODE (sub) == MEM
961 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
964 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
965 may result in incorrect reference. We should simplify all valid
966 subregs of MEM anyway. But allow this after reload because we
967 might be called from cleanup_subreg_operands.
969 ??? This is a kludge. */
970 if (!reload_completed && SUBREG_BYTE (op) != 0
971 && GET_CODE (sub) == MEM)
974 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
975 create such rtl, and we must reject it. */
976 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
977 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
/* Strip the SUBREG and fall through to judge the inner expression.  */
981 code = GET_CODE (op);
985 /* A register whose class is NO_REGS is not a general operand. */
986 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
987 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
991 rtx y = XEXP (op, 0);
993 if (! volatile_ok && MEM_VOLATILE_P (op))
996 if (GET_CODE (y) == ADDRESSOF)
999 /* Use the mem's mode, since it will be reloaded thus. */
1000 mode = GET_MODE (op);
1001 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1004 /* Pretend this is an operand for now; we'll run force_operand
1005 on its replacement in fixup_var_refs_1. */
1006 if (code == ADDRESSOF)
1015 /* Return 1 if OP is a valid memory address for a memory reference
1018 The main use of this function is as a predicate in match_operand
1019 expressions in the machine description. */
1022 address_operand (op, mode)
1024 enum machine_mode mode;
/* Thin wrapper: delegates entirely to memory_address_p.  */
1026 return memory_address_p (mode, op);
1029 /* Return 1 if OP is a register reference of mode MODE.
1030 If MODE is VOIDmode, accept a register in any mode.
1032 The main use of this function is as a predicate in match_operand
1033 expressions in the machine description.
1035 As a special exception, registers whose class is NO_REGS are
1036 not accepted by `register_operand'. The reason for this change
1037 is to allow the representation of special architecture artifacts
1038 (such as a condition code register) without extending the rtl
1039 definitions. Since registers of class NO_REGS cannot be used
1040 as registers in any case where register classes are examined,
1041 it is most consistent to keep this function from accepting them. */
1044 register_operand (op, mode)
1046 enum machine_mode mode;
1048 if (GET_MODE (op) != mode && mode != VOIDmode)
1051 if (GET_CODE (op) == SUBREG)
1053 rtx sub = SUBREG_REG (op);
1055 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1056 because it is guaranteed to be reloaded into one.
1057 Just make sure the MEM is valid in itself.
1058 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1059 but currently it does result from (SUBREG (REG)...) where the
1060 reg went on the stack.) */
1061 if (! reload_completed && GET_CODE (sub) == MEM)
1062 return general_operand (op, mode);
1064 #ifdef CLASS_CANNOT_CHANGE_MODE
/* Reject SUBREGs of hard registers whose class cannot change mode,
   except for complex modes (which are handled as pairs of parts).  */
1065 if (GET_CODE (sub) == REG
1066 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1067 && (TEST_HARD_REG_BIT
1068 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1070 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (sub))
1071 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1072 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1076 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1077 create such rtl, and we must reject it. */
1078 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1079 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub))
1085 /* If we have an ADDRESSOF, consider it valid since it will be
1086 converted into something that will not be a MEM. */
1087 if (GET_CODE (op) == ADDRESSOF)
1090 /* We don't consider registers whose class is NO_REGS
1091 to be a register operand. */
1092 return (GET_CODE (op) == REG
1093 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1094 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1097 /* Return 1 for a register in Pmode; ignore the tested mode. */
1100 pmode_register_operand (op, mode)
1102 enum machine_mode mode ATTRIBUTE_UNUSED;
/* MODE is deliberately discarded; always test against Pmode.  */
1104 return register_operand (op, Pmode);
1107 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1108 or a hard register. */
1111 scratch_operand (op, mode)
1113 enum machine_mode mode;
1115 if (GET_MODE (op) != mode && mode != VOIDmode)
/* Only hard registers qualify: pseudos (>= FIRST_PSEUDO_REGISTER)
   are rejected.  */
1118 return (GET_CODE (op) == SCRATCH
1119 || (GET_CODE (op) == REG
1120 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1123 /* Return 1 if OP is a valid immediate operand for mode MODE.
1125 The main use of this function is as a predicate in match_operand
1126 expressions in the machine description. */
1129 immediate_operand (op, mode)
1131 enum machine_mode mode;
1133 /* Don't accept CONST_INT or anything similar
1134 if the caller wants something floating. */
1135 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1136 && GET_MODE_CLASS (mode) != MODE_INT
1137 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Reject a CONST_INT not in canonical sign-extended form for MODE.  */
1140 if (GET_CODE (op) == CONST_INT
1141 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1144 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1145 result in 0/1. It seems a safe assumption that this is
1146 in range for everyone. */
1147 if (GET_CODE (op) == CONSTANT_P_RTX)
1150 return (CONSTANT_P (op)
1151 && (GET_MODE (op) == mode || mode == VOIDmode
1152 || GET_MODE (op) == VOIDmode)
1153 #ifdef LEGITIMATE_PIC_OPERAND_P
1154 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1156 && LEGITIMATE_CONSTANT_P (op));
1159 /* Returns 1 if OP is an operand that is a CONST_INT. */
1162 const_int_operand (op, mode)
1164 enum machine_mode mode;
1166 if (GET_CODE (op) != CONST_INT)
/* With a specific MODE, also require the value to be canonical
   (sign-extended) for that mode.  */
1169 if (mode != VOIDmode
1170 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1176 /* Returns 1 if OP is an operand that is a constant integer or constant
1177 floating-point number. */
1180 const_double_operand (op, mode)
1182 enum machine_mode mode;
1184 /* Don't accept CONST_INT or anything similar
1185 if the caller wants something floating. */
1186 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1187 && GET_MODE_CLASS (mode) != MODE_INT
1188 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
/* Accept CONST_DOUBLE or CONST_INT whose mode matches or is VOIDmode.  */
1191 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1192 && (mode == VOIDmode || GET_MODE (op) == mode
1193 || GET_MODE (op) == VOIDmode));
1196 /* Return 1 if OP is a general operand that is not an immediate operand. */
1199 nonimmediate_operand (op, mode)
1201 enum machine_mode mode;
/* i.e. a register or memory reference, but never a constant.  */
1203 return (general_operand (op, mode) && ! CONSTANT_P (op));
1206 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1209 nonmemory_operand (op, mode)
1211 enum machine_mode mode;
1213 if (CONSTANT_P (op))
1215 /* Don't accept CONST_INT or anything similar
1216 if the caller wants something floating. */
1217 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1218 && GET_MODE_CLASS (mode) != MODE_INT
1219 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1222 if (GET_CODE (op) == CONST_INT
1223 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1226 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1227 || mode == VOIDmode)
1228 #ifdef LEGITIMATE_PIC_OPERAND_P
1229 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1231 && LEGITIMATE_CONSTANT_P (op));
/* Non-constant path: mirrors register_operand's checks.  */
1234 if (GET_MODE (op) != mode && mode != VOIDmode)
1237 if (GET_CODE (op) == SUBREG)
1239 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1240 because it is guaranteed to be reloaded into one.
1241 Just make sure the MEM is valid in itself.
1242 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1243 but currently it does result from (SUBREG (REG)...) where the
1244 reg went on the stack.) */
1245 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1246 return general_operand (op, mode);
1247 op = SUBREG_REG (op);
1250 /* We don't consider registers whose class is NO_REGS
1251 to be a register operand. */
1252 return (GET_CODE (op) == REG
1253 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1254 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1257 /* Return 1 if OP is a valid operand that stands for pushing a
1258 value of mode MODE onto the stack.
1260 The main use of this function is as a predicate in match_operand
1261 expressions in the machine description. */
1264 push_operand (op, mode)
1266 enum machine_mode mode;
1268 unsigned int rounded_size = GET_MODE_SIZE (mode);
1270 #ifdef PUSH_ROUNDING
1271 rounded_size = PUSH_ROUNDING (rounded_size);
1274 if (GET_CODE (op) != MEM)
1277 if (mode != VOIDmode && GET_MODE (op) != mode)
/* If no rounding occurred, a simple pre-dec/pre-inc address suffices;
   otherwise require an explicit PRE_MODIFY by the rounded size.  */
1282 if (rounded_size == GET_MODE_SIZE (mode))
1284 if (GET_CODE (op) != STACK_PUSH_CODE)
1289 if (GET_CODE (op) != PRE_MODIFY
1290 || GET_CODE (XEXP (op, 1)) != PLUS
1291 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1292 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1293 #ifdef STACK_GROWS_DOWNWARD
1294 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1296 || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
/* Finally, the side-effected address must be the stack pointer.  */
1302 return XEXP (op, 0) == stack_pointer_rtx;
1305 /* Return 1 if OP is a valid operand that stands for popping a
1306 value of mode MODE off the stack.
1308 The main use of this function is as a predicate in match_operand
1309 expressions in the machine description. */
1312 pop_operand (op, mode)
1314 enum machine_mode mode;
1316 if (GET_CODE (op) != MEM)
1319 if (mode != VOIDmode && GET_MODE (op) != mode)
/* Must be a MEM whose address is the target's pop auto-inc/dec of
   the stack pointer.  */
1324 if (GET_CODE (op) != STACK_POP_CODE)
1327 return XEXP (op, 0) == stack_pointer_rtx;
1330 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1333 memory_address_p (mode, addr)
1334 enum machine_mode mode ATTRIBUTE_UNUSED;
/* ADDRESSOF is always acceptable; otherwise defer to the target's
   GO_IF_LEGITIMATE_ADDRESS, which jumps to `win' on success.  */
1337 if (GET_CODE (addr) == ADDRESSOF)
1340 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1347 /* Return 1 if OP is a valid memory reference with mode MODE,
1348 including a valid address.
1350 The main use of this function is as a predicate in match_operand
1351 expressions in the machine description. */
1354 memory_operand (op, mode)
1356 enum machine_mode mode;
1360 if (! reload_completed)
1361 /* Note that no SUBREG is a memory operand before end of reload pass,
1362 because (SUBREG (MEM...)) forces reloading into a register. */
1363 return GET_CODE (op) == MEM && general_operand (op, mode);
1365 if (mode != VOIDmode && GET_MODE (op) != mode)
/* After reload, look through a SUBREG wrapper to find the MEM.  */
1369 if (GET_CODE (inner) == SUBREG)
1370 inner = SUBREG_REG (inner);
1372 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1375 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1376 that is, a memory reference whose address is a general_operand. */
1379 indirect_operand (op, mode)
1381 enum machine_mode mode;
1383 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1384 if (! reload_completed
1385 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1387 int offset = SUBREG_BYTE (op);
1388 rtx inner = SUBREG_REG (op);
1390 if (mode != VOIDmode && GET_MODE (op) != mode)
1393 /* The only way that we can have a general_operand as the resulting
1394 address is if OFFSET is zero and the address already is an operand
1395 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1398 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1399 || (GET_CODE (XEXP (inner, 0)) == PLUS
1400 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1401 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1402 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM whose address is itself a general operand
   in the pointer mode.  */
1405 return (GET_CODE (op) == MEM
1406 && memory_operand (op, mode)
1407 && general_operand (XEXP (op, 0), Pmode));
1410 /* Return 1 if this is a comparison operator. This allows the use of
1411 MATCH_OPERATOR to recognize all the branch insns. */
1414 comparison_operator (op, mode)
1416 enum machine_mode mode;
/* RTX class '<' covers every comparison code (EQ, NE, LT, GEU, ...).  */
1418 return ((mode == VOIDmode || GET_MODE (op) == mode)
1419 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1422 /* If BODY is an insn body that uses ASM_OPERANDS,
1423 return the number of operands (both input and output) in the insn.
1424 Otherwise return -1. */
1427 asm_noperands (body)
/* Dispatch on the overall shape of the asm pattern.  */
1430 switch (GET_CODE (body))
1433 /* No output operands: return number of input operands. */
1434 return ASM_OPERANDS_INPUT_LENGTH (body);
1436 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1437 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1438 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1442 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1443 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1445 /* Multiple output operands, or 1 output plus some clobbers:
1446 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1450 /* Count backwards through CLOBBERs to determine number of SETs. */
1451 for (i = XVECLEN (body, 0); i > 0; i--)
1453 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1455 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1459 /* N_SETS is now number of output operands. */
1462 /* Verify that all the SETs we have
1463 came from a single original asm_operands insn
1464 (so that invalid combinations are blocked). */
1465 for (i = 0; i < n_sets; i++)
1467 rtx elt = XVECEXP (body, 0, i);
1468 if (GET_CODE (elt) != SET)
1470 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1472 /* If these ASM_OPERANDS rtx's came from different original insns
1473 then they aren't allowed together. */
1474 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1475 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
/* Total operand count is inputs plus the output SETs.  */
1478 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1481 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1483 /* 0 outputs, but some clobbers:
1484 body is [(asm_operands ...) (clobber (reg ...))...]. */
1487 /* Make sure all the other parallel things really are clobbers. */
1488 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1489 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1492 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1501 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1502 copy its operands (both input and output) into the vector OPERANDS,
1503 the locations of the operands within the insn into the vector OPERAND_LOCS,
1504 and the constraints for the operands into CONSTRAINTS.
1505 Write the modes of the operands into MODES.
1506 Return the assembler-template.
1508 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1509 we don't store that info. */
1512 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1516 const char **constraints;
1517 enum machine_mode *modes;
1521 const char *template = 0;
/* Case 1: exactly one output -- (set OUTPUT (asm_operands ...)).
   Outputs are numbered before inputs, so the output is operand 0.  */
1523 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1525 rtx asmop = SET_SRC (body);
1526 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1528 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Inputs occupy slots 1..noperands-1; each output vector is filled
   only when the caller passed a non-null pointer for it.  */
1530 for (i = 1; i < noperands; i++)
1533 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1535 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1537 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1539 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1542 /* The output is in the SET.
1543 Its constraint is in the ASM_OPERANDS itself. */
1545 operands[0] = SET_DEST (body);
1547 operand_locs[0] = &SET_DEST (body);
1549 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1551 modes[0] = GET_MODE (SET_DEST (body));
1552 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 2: no outputs at all -- bare (asm_operands ...).  */
1554 else if (GET_CODE (body) == ASM_OPERANDS)
1557 /* No output operands: BODY is (asm_operands ....). */
1559 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1561 /* The input operands are found in the 1st element vector. */
1562 /* Constraints for inputs are in the 2nd element vector. */
1563 for (i = 0; i < noperands; i++)
1566 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1568 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1570 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1572 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1574 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 3: several outputs and/or clobbers --
   (parallel [(set ...) ... (clobber ...) ...]).  */
1576 else if (GET_CODE (body) == PARALLEL
1577 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1578 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1580 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1581 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1582 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1583 int nout = 0; /* Does not include CLOBBERs. */
1585 /* At least one output, plus some CLOBBERs. */
1587 /* The outputs are in the SETs.
1588 Their constraints are in the ASM_OPERANDS itself. */
1589 for (i = 0; i < nparallel; i++)
1591 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1592 break; /* Past last SET */
1595 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1597 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1599 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1601 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
/* Inputs come after the NOUT outputs.  */
1605 for (i = 0; i < nin; i++)
1608 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1610 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1612 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1614 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1617 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Case 4: no outputs, but clobbers --
   (parallel [(asm_operands ...) (clobber ...) ...]).  */
1619 else if (GET_CODE (body) == PARALLEL
1620 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1622 /* No outputs, but some CLOBBERs. */
1624 rtx asmop = XVECEXP (body, 0, 0);
1625 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1627 for (i = 0; i < nin; i++)
1630 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1632 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1634 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1636 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1639 template = ASM_OPERANDS_TEMPLATE (asmop);
1645 /* Check if an asm_operand matches its constraints.
1646 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1649 asm_operand_ok (op, constraint)
1651 const char *constraint;
1655 /* Use constrain_operands after reload. */
1656 if (reload_completed)
/* Walk the constraint string one letter at a time; any satisfied
   alternative letter makes the operand acceptable.  */
1661 char c = *constraint++;
1675 case '0': case '1': case '2': case '3': case '4':
1676 case '5': case '6': case '7': case '8': case '9':
1677 /* For best results, our caller should have given us the
1678 proper matching constraint, but we can't actually fail
1679 the check if they didn't. Indicate that results are
/* Skip the rest of a multi-digit matching-operand number.  */
1681 while (ISDIGIT (*constraint))
/* 'p' -- any valid address.  */
1687 if (address_operand (op, VOIDmode))
1692 case 'V': /* non-offsettable */
1693 if (memory_operand (op, VOIDmode))
1697 case 'o': /* offsettable */
1698 if (offsettable_nonstrict_memref_p (op))
1703 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1704 excepting those that expand_call created. Further, on some
1705 machines which do not have generalized auto inc/dec, an inc/dec
1706 is not a memory_operand.
1708 Match any memory and hope things are resolved after reload. */
/* '<' -- memory with auto-decrement addressing.  */
1710 if (GET_CODE (op) == MEM
1712 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1713 || GET_CODE (XEXP (op, 0)) == POST_DEC))
/* '>' -- memory with auto-increment addressing.  */
1718 if (GET_CODE (op) == MEM
1720 || GET_CODE (XEXP (op, 0)) == PRE_INC
1721 || GET_CODE (XEXP (op, 0)) == POST_INC))
1726 #ifndef REAL_ARITHMETIC
1727 /* Match any floating double constant, but only if
1728 we can examine the bits of it reliably. */
1729 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1730 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1731 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1737 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H' -- target-defined CONST_DOUBLE classes.  */
1742 if (GET_CODE (op) == CONST_DOUBLE
1743 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1747 if (GET_CODE (op) == CONST_DOUBLE
1748 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
/* 's'/'i' family -- integer constants (a VOIDmode CONST_DOUBLE is a
   wide integer constant, not a float).  */
1753 if (GET_CODE (op) == CONST_INT
1754 || (GET_CODE (op) == CONST_DOUBLE
1755 && GET_MODE (op) == VOIDmode))
1761 #ifdef LEGITIMATE_PIC_OPERAND_P
1762 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1769 if (GET_CODE (op) == CONST_INT
1770 || (GET_CODE (op) == CONST_DOUBLE
1771 && GET_MODE (op) == VOIDmode))
/* 'I'..'P' -- target-defined ranges of CONST_INT values.  */
1776 if (GET_CODE (op) == CONST_INT
1777 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1781 if (GET_CODE (op) == CONST_INT
1782 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1786 if (GET_CODE (op) == CONST_INT
1787 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1791 if (GET_CODE (op) == CONST_INT
1792 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1796 if (GET_CODE (op) == CONST_INT
1797 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1801 if (GET_CODE (op) == CONST_INT
1802 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1806 if (GET_CODE (op) == CONST_INT
1807 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1811 if (GET_CODE (op) == CONST_INT
1812 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
/* 'X'/'g' style catch-alls -- any general operand.  */
1820 if (general_operand (op, VOIDmode))
1825 /* For all other letters, we first check for a register class,
1826 otherwise it is an EXTRA_CONSTRAINT. */
1827 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1830 if (GET_MODE (op) == BLKmode)
1832 if (register_operand (op, VOIDmode))
1835 #ifdef EXTRA_CONSTRAINT
1836 if (EXTRA_CONSTRAINT (op, c))
1846 /* Given an rtx *P, if it is a sum containing an integer constant term,
1847 return the location (type rtx *) of the pointer to that constant term.
1848 Otherwise, return a null pointer. */
1851 find_constant_term_loc (p)
1855 enum rtx_code code = GET_CODE (*p);
1857 /* If *P IS such a constant term, P is its location. */
1859 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1863 /* Otherwise, if not a sum, it has no constant term. */
1865 if (GET_CODE (*p) != PLUS)
1868 /* If one of the summands is constant, return its location. */
1870 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1871 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1874 /* Otherwise, check each summand for containing a constant term. */
/* Recurse into each arm of the PLUS; the first hit wins.  */
1876 if (XEXP (*p, 0) != 0)
1878 tem = find_constant_term_loc (&XEXP (*p, 0));
1883 if (XEXP (*p, 1) != 0)
1885 tem = find_constant_term_loc (&XEXP (*p, 1));
1893 /* Return 1 if OP is a memory reference
1894 whose address contains no side effects
1895 and remains valid after the addition
1896 of a positive integer less than the
1897 size of the object being referenced.
1899 We assume that the original address is valid and do not check it.
1901 This uses strict_memory_address_p as a subroutine, so
1902 don't use it before reload. */
1905 offsettable_memref_p (op)
/* Delegate to offsettable_address_p with STRICTP == 1 (strict form).  */
1908 return ((GET_CODE (op) == MEM)
1909 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1912 /* Similar, but don't require a strictly valid mem ref:
1913 consider pseudo-regs valid as index or base regs. */
1916 offsettable_nonstrict_memref_p (op)
/* Same as offsettable_memref_p but with STRICTP == 0.  */
1919 return ((GET_CODE (op) == MEM)
1920 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1923 /* Return 1 if Y is a memory address which contains no side effects
1924 and would remain valid after the addition of a positive integer
1925 less than the size of that mode.
1927 We assume that the original address is valid and do not check it.
1928 We do check that it is valid for narrower modes.
1930 If STRICTP is nonzero, we require a strictly valid address,
1931 for the sake of use in reload.c. */
1934 offsettable_address_p (strictp, mode, y)
1936 enum machine_mode mode;
1939 enum rtx_code ycode = GET_CODE (y);
/* Pick the strict or nonstrict validity check up front so the rest of
   the function is agnostic to STRICTP.  */
1943 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1944 (strictp ? strict_memory_address_p : memory_address_p);
1945 unsigned int mode_sz = GET_MODE_SIZE (mode);
/* A constant address offsets trivially.  */
1947 if (CONSTANT_ADDRESS_P (y))
1950 /* Adjusting an offsettable address involves changing to a narrower mode.
1951 Make sure that's OK. */
1953 if (mode_dependent_address_p (y))
1956 /* ??? How much offset does an offsettable BLKmode reference need?
1957 Clearly that depends on the situation in which it's being used.
1958 However, the current situation in which we test 0xffffffff is
1959 less than ideal. Caveat user. */
1961 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1963 /* If the expression contains a constant term,
1964 see if it remains valid when max possible offset is added. */
1966 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
/* Temporarily bump the constant term by the largest offset we would
   ever add, test validity, then restore the original rtx in place.  */
1971 *y2 = plus_constant (*y2, mode_sz - 1);
1972 /* Use QImode because an odd displacement may be automatically invalid
1973 for any wider mode. But it should be valid for a single byte. */
1974 good = (*addressp) (QImode, y);
1976 /* In any case, restore old contents of memory. */
/* Addresses with side effects ('a' rtx class: auto inc/dec) are never
   offsettable.  */
1981 if (GET_RTX_CLASS (ycode) == 'a')
1984 /* The offset added here is chosen as the maximum offset that
1985 any instruction could need to add when operating on something
1986 of the specified mode. We assume that if Y and Y+c are
1987 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1988 go inside a LO_SUM here, so we do so as well. */
1989 if (GET_CODE (y) == LO_SUM
1991 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1992 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1993 plus_constant (XEXP (y, 1), mode_sz - 1))
1995 z = plus_constant (y, mode_sz - 1);
1997 /* Use QImode because an odd displacement may be automatically invalid
1998 for any wider mode. But it should be valid for a single byte. */
1999 return (*addressp) (QImode, z);
2002 /* Return 1 if ADDR is an address-expression whose effect depends
2003 on the mode of the memory reference it is used in.
2005 Autoincrement addressing is a typical example of mode-dependence
2006 because the amount of the increment depends on the mode. */
2009 mode_dependent_address_p (addr)
2010 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
/* The target macro jumps to `win' when the address IS mode-dependent.  */
2012 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2014 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2015 win: ATTRIBUTE_UNUSED_LABEL
2019 /* Return 1 if OP is a general operand
2020 other than a memory ref with a mode dependent address. */
2023 mode_independent_operand (op, mode)
2024 enum machine_mode mode;
/* Must be a general operand first; non-MEMs then qualify trivially.  */
2029 if (! general_operand (op, mode))
2032 if (GET_CODE (op) != MEM)
/* For a MEM, reject it when the target says the address is
   mode-dependent (the macro jumps to `lose' in that case).  */
2035 addr = XEXP (op, 0);
2036 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2038 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2039 lose: ATTRIBUTE_UNUSED_LABEL
2043 /* Like extract_insn, but save insn extracted and don't extract again, when
2044 called again for the same insn expecting that recog_data still contains the
2045 valid information. This is used primarily by the gen_attr infrastructure
2046 that often does extract insn again and again. */
2048 extract_insn_cached (insn)
/* Cache hit: recog_data already describes this (recognized) insn.  */
2051 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2053 extract_insn (insn);
2054 recog_data.insn = insn;
2056 /* Do cached extract_insn, constrain_operand and complain about failures.
2057 Used by insn_attrtab. */
2059 extract_constrain_insn_cached (insn)
2062 extract_insn_cached (insn);
/* which_alternative == -1 means constraints were not yet matched for
   this extraction; match them now and die loudly if nothing fits.  */
2063 if (which_alternative == -1
2064 && !constrain_operands (reload_completed))
2065 fatal_insn_not_found (insn);
2067 /* Do cached constrain_operand and complain about failures. */
2069 constrain_operands_cached (strict)
/* Only re-run the (expensive) constraint match when no alternative
   has been selected yet for the current recog_data contents.  */
2072 if (which_alternative == -1)
2073 return constrain_operands (strict);
2078 /* Analyze INSN and fill in recog_data. */
2087 rtx body = PATTERN (insn);
/* Reset the global scratch area before classifying the pattern.  */
2089 recog_data.insn = NULL;
2090 recog_data.n_operands = 0;
2091 recog_data.n_alternatives = 0;
2092 recog_data.n_dups = 0;
2093 which_alternative = -1;
2095 switch (GET_CODE (body))
/* asm patterns: (set ... (asm_operands ...)), (parallel [...]) with an
   asm inside, or a bare asm_operands.  */
2105 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2110 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2111 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2112 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2118 recog_data.n_operands = noperands = asm_noperands (body);
2121 /* This insn is an `asm' with operands. */
2123 /* expand_asm_operands makes sure there aren't too many operands. */
2124 if (noperands > MAX_RECOG_OPERANDS)
2127 /* Now get the operand values and constraints out of the insn. */
2128 decode_asm_operands (body, recog_data.operand,
2129 recog_data.operand_loc,
2130 recog_data.constraints,
2131 recog_data.operand_mode);
/* Count the alternatives: one more than the number of commas in the
   first operand's constraint string.  */
2134 const char *p = recog_data.constraints[0];
2135 recog_data.n_alternatives = 1;
2137 recog_data.n_alternatives += (*p++ == ',');
2141 fatal_insn_not_found (insn);
2145 /* Ordinary insn: recognize it, get the operands via insn_extract
2146 and get the constraints. */
2148 icode = recog_memoized (insn);
2150 fatal_insn_not_found (insn);
/* Copy the static per-pattern data from insn_data.  */
2152 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2153 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2154 recog_data.n_dups = insn_data[icode].n_dups;
2156 insn_extract (insn);
2158 for (i = 0; i < noperands; i++)
2160 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2161 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2162 /* VOIDmode match_operands gets mode from their real operand. */
2163 if (recog_data.operand_mode[i] == VOIDmode)
2164 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify each operand from its constraint's first character:
   '=' output, '+' in/out, anything else input.  */
2167 for (i = 0; i < noperands; i++)
2168 recog_data.operand_type[i]
2169 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2170 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2173 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2177 /* After calling extract_insn, you can use this function to extract some
2178 information from the constraint strings into a more usable form.
2179 The collected data is stored in recog_op_alt. */
2181 preprocess_constraints ()
2185 memset (recog_op_alt, 0, sizeof recog_op_alt);
/* For each operand, parse its constraint string once per alternative
   (alternatives are comma-separated within the same string).  */
2186 for (i = 0; i < recog_data.n_operands; i++)
2189 struct operand_alternative *op_alt;
2190 const char *p = recog_data.constraints[i];
2192 op_alt = recog_op_alt[i];
2194 for (j = 0; j < recog_data.n_alternatives; j++)
2196 op_alt[j].class = NO_REGS;
2197 op_alt[j].constraint = p;
2198 op_alt[j].matches = -1;
2199 op_alt[j].matched = -1;
/* Empty constraint/alternative accepts anything.  */
2201 if (*p == '\0' || *p == ',')
2203 op_alt[j].anything_ok = 1;
2213 while (c != ',' && c != '\0');
2214 if (c == ',' || c == '\0')
2219 case '=': case '+': case '*': case '%':
2220 case 'E': case 'F': case 'G': case 'H':
2221 case 's': case 'i': case 'n':
2222 case 'I': case 'J': case 'K': case 'L':
2223 case 'M': case 'N': case 'O': case 'P':
2224 /* These don't say anything we care about. */
/* '?' mildly disparages this alternative; '!' effectively kills it.  */
2228 op_alt[j].reject += 6;
2231 op_alt[j].reject += 600;
2234 op_alt[j].earlyclobber = 1;
2237 case '0': case '1': case '2': case '3': case '4':
2238 case '5': case '6': case '7': case '8': case '9':
/* Matching constraint: record the pairing in both directions.  */
2241 op_alt[j].matches = strtoul (p - 1, &end, 10);
2242 recog_op_alt[op_alt[j].matches][j].matched = i;
2248 op_alt[j].memory_ok = 1;
2251 op_alt[j].decmem_ok = 1;
2254 op_alt[j].incmem_ok = 1;
2257 op_alt[j].nonoffmem_ok = 1;
2260 op_alt[j].offmem_ok = 1;
2263 op_alt[j].anything_ok = 1;
/* 'p' (address) implies the target's base-register class.  */
2267 op_alt[j].is_address = 1;
2268 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2269 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2273 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2277 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2285 /* Check the operands of an insn against the insn's operand constraints
2286 and return 1 if they are valid.
2287 The information about the insn's operands, constraints, operand modes
2288 etc. is obtained from the global variables set up by extract_insn.
2290 WHICH_ALTERNATIVE is set to a number which indicates which
2291 alternative of constraints was matched: 0 for the first alternative,
2292 1 for the next, etc.
2294 In addition, when two operands match
2295 and it happens that the output operand is (reg) while the
2296 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2297 make the output operand look like the input.
2298 This is because the output operand is the one the template will print.
2300 This is used in final, just before printing the assembler code and by
2301 the routines that determine an insn's attribute.
2303 If STRICT is a positive non-zero value, it means that we have been
2304 called after reload has been completed. In that case, we must
2305 do all checks strictly. If it is zero, it means that we have been called
2306 before reload has completed. In that case, we first try to see if we can
2307 find an alternative that matches strictly. If not, we try again, this
2308 time assuming that reload will fix up the insn. This provides a "best
2309 guess" for the alternative and is used to compute attributes of insns prior
2310 to reload. A negative value of STRICT is used for this internal call. */
2318 constrain_operands (strict)
2321 const char *constraints[MAX_RECOG_OPERANDS];
2322 int matching_operands[MAX_RECOG_OPERANDS];
2323 int earlyclobber[MAX_RECOG_OPERANDS];
2326 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2327 int funny_match_index;
2329 which_alternative = 0;
/* No operands or no alternatives: trivially valid.  */
2330 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2333 for (c = 0; c < recog_data.n_operands; c++)
2335 constraints[c] = recog_data.constraints[c];
2336 matching_operands[c] = -1;
/* Outer loop (partially elided here): try each alternative in turn
   until one matches every operand.  */
2343 funny_match_index = 0;
2345 for (opno = 0; opno < recog_data.n_operands; opno++)
2347 rtx op = recog_data.operand[opno];
2348 enum machine_mode mode = GET_MODE (op);
2349 const char *p = constraints[opno];
2354 earlyclobber[opno] = 0;
2356 /* A unary operator may be accepted by the predicate, but it
2357 is irrelevant for matching constraints. */
2358 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
/* Look through a SUBREG of a hard reg, remembering the regno offset
   so reg_fits_class_p can test the actual hard register used.  */
2361 if (GET_CODE (op) == SUBREG)
2363 if (GET_CODE (SUBREG_REG (op)) == REG
2364 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2365 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2366 GET_MODE (SUBREG_REG (op)),
2369 op = SUBREG_REG (op);
2372 /* An empty constraint or empty alternative
2373 allows anything which matched the pattern. */
2374 if (*p == 0 || *p == ',')
/* Scan this operand's constraint letters for the current
   alternative; any single satisfied letter means the operand wins.  */
2377 while (*p && (c = *p++) != ',')
2380 case '?': case '!': case '*': case '%':
2385 /* Ignore rest of this alternative as far as
2386 constraint checking is concerned. */
2387 while (*p && *p != ',')
2392 earlyclobber[opno] = 1;
2395 case '0': case '1': case '2': case '3': case '4':
2396 case '5': case '6': case '7': case '8': case '9':
2398 /* This operand must be the same as a previous one.
2399 This kind of constraint is used for instructions such
2400 as add when they take only two operands.
2402 Note that the lower-numbered operand is passed first.
2404 If we are not testing strictly, assume that this
2405 constraint will be satisfied. */
2410 match = strtoul (p - 1, &end, 10);
2417 rtx op1 = recog_data.operand[match];
2418 rtx op2 = recog_data.operand[opno];
2420 /* A unary operator may be accepted by the predicate,
2421 but it is irrelevant for matching constraints. */
2422 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2423 op1 = XEXP (op1, 0);
2424 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2425 op2 = XEXP (op2, 0);
2427 val = operands_match_p (op1, op2);
2430 matching_operands[opno] = match;
2431 matching_operands[match] = opno;
2436 /* If output is *x and input is *--x, arrange later
2437 to change the output to *--x as well, since the
2438 output op is the one that will be printed. */
2439 if (val == 2 && strict > 0)
2441 funny_match[funny_match_index].this = opno;
2442 funny_match[funny_match_index++].other = match;
2448 /* p is used for address_operands. When we are called by
2449 gen_reload, no one will have checked that the address is
2450 strictly valid, i.e., that all pseudos requiring hard regs
2451 have gotten them. */
2453 || (strict_memory_address_p (recog_data.operand_mode[opno],
2458 /* No need to check general_operand again;
2459 it was done in insn-recog.c. */
2461 /* Anything goes unless it is a REG and really has a hard reg
2462 but the hard reg is not in the class GENERAL_REGS. */
2464 || GENERAL_REGS == ALL_REGS
2465 || GET_CODE (op) != REG
2466 || (reload_in_progress
2467 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2468 || reg_fits_class_p (op, GENERAL_REGS, offset, mode)
2473 /* This is used for a MATCH_SCRATCH in the cases when
2474 we don't actually need anything. So anything goes
/* 'm' -- memory operand, or what reload can turn into one.  */
2480 if (GET_CODE (op) == MEM
2481 /* Before reload, accept what reload can turn into mem. */
2482 || (strict < 0 && CONSTANT_P (op))
2483 /* During reload, accept a pseudo */
2484 || (reload_in_progress && GET_CODE (op) == REG
2485 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* '<' / '>' -- auto-decrement / auto-increment memory.  */
2490 if (GET_CODE (op) == MEM
2491 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2492 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2497 if (GET_CODE (op) == MEM
2498 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2499 || GET_CODE (XEXP (op, 0)) == POST_INC))
2504 #ifndef REAL_ARITHMETIC
2505 /* Match any CONST_DOUBLE, but only if
2506 we can examine the bits of it reliably. */
2507 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2508 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2509 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2512 if (GET_CODE (op) == CONST_DOUBLE)
2517 if (GET_CODE (op) == CONST_DOUBLE)
/* 'G'/'H' -- target-defined CONST_DOUBLE classes.  */
2523 if (GET_CODE (op) == CONST_DOUBLE
2524 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
/* Integer-constant letters: a VOIDmode CONST_DOUBLE is a wide
   integer constant here, not a float.  */
2529 if (GET_CODE (op) == CONST_INT
2530 || (GET_CODE (op) == CONST_DOUBLE
2531 && GET_MODE (op) == VOIDmode))
2534 if (CONSTANT_P (op))
2539 if (GET_CODE (op) == CONST_INT
2540 || (GET_CODE (op) == CONST_DOUBLE
2541 && GET_MODE (op) == VOIDmode))
/* 'I'..'P' -- target-defined CONST_INT ranges.  */
2553 if (GET_CODE (op) == CONST_INT
2554 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
/* 'V' -- memory that is NOT offsettable.  */
2559 if (GET_CODE (op) == MEM
2560 && ((strict > 0 && ! offsettable_memref_p (op))
2562 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2563 || (reload_in_progress
2564 && !(GET_CODE (op) == REG
2565 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
/* 'o' -- offsettable memory (strictness depends on STRICT).  */
2570 if ((strict > 0 && offsettable_memref_p (op))
2571 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2572 /* Before reload, accept what reload can handle. */
2574 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2575 /* During reload, accept a pseudo */
2576 || (reload_in_progress && GET_CODE (op) == REG
2577 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
/* Remaining letters: register class ('r' or target letter), else
   the target's EXTRA_CONSTRAINT.  */
2583 enum reg_class class;
2585 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2586 if (class != NO_REGS)
2590 && GET_CODE (op) == REG
2591 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2592 || (strict == 0 && GET_CODE (op) == SCRATCH)
2593 || (GET_CODE (op) == REG
2594 && reg_fits_class_p (op, class, offset, mode)))
2597 #ifdef EXTRA_CONSTRAINT
2598 else if (EXTRA_CONSTRAINT (op, c))
/* Remember where the next alternative's constraints begin.  */
2605 constraints[opno] = p;
2606 /* If this operand did not win somehow,
2607 this alternative loses. */
2611 /* This alternative won; the operands are ok.
2612 Change whichever operands this alternative says to change. */
2617 /* See if any earlyclobber operand conflicts with some other
2621 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2622 /* Ignore earlyclobber operands now in memory,
2623 because we would often report failure when we have
2624 two memory operands, one of which was formerly a REG. */
2625 if (earlyclobber[eopno]
2626 && GET_CODE (recog_data.operand[eopno]) == REG)
2627 for (opno = 0; opno < recog_data.n_operands; opno++)
2628 if ((GET_CODE (recog_data.operand[opno]) == MEM
2629 || recog_data.operand_type[opno] != OP_OUT)
2631 /* Ignore things like match_operator operands. */
2632 && *recog_data.constraints[opno] != 0
2633 && ! (matching_operands[opno] == eopno
2634 && operands_match_p (recog_data.operand[opno],
2635 recog_data.operand[eopno]))
2636 && ! safe_from_earlyclobber (recog_data.operand[opno],
2637 recog_data.operand[eopno]))
/* Apply the deferred "make output look like auto-inc input" fixups
   recorded above via funny_match.  */
2642 while (--funny_match_index >= 0)
2644 recog_data.operand[funny_match[funny_match_index].other]
2645 = recog_data.operand[funny_match[funny_match_index].this];
2652 which_alternative++;
2654 while (which_alternative < recog_data.n_alternatives);
2656 which_alternative = -1;
2657 /* If we are about to reject this, but we are not to test strictly,
2658 try a very loose test. Only return failure if it fails also. */
2660 return constrain_operands (-1);
2665 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2666 is a hard reg in class CLASS when its regno is offset by OFFSET
2667 and changed to mode MODE.
2668 If REG occupies multiple hard regs, all of them must be in CLASS. */
2671 reg_fits_class_p (operand, class, offset, mode)
2673 enum reg_class class;
2675 enum machine_mode mode;
2677 int regno = REGNO (operand);
/* Only hard registers can be tested against a class bitmap.  */
2678 if (regno < FIRST_PSEUDO_REGISTER
2679 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
/* Check every hard reg the (possibly multi-word) value occupies.  */
2684 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2686 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2695 /* Split single instruction. Helper function for split_all_insns.
2696 Return last insn in the sequence if successful, or NULL if unsuccessful. */
2704 /* Don't split no-op move insns. These should silently
2705 disappear later in final. Splitting such insns would
2706 break the code that handles REG_NO_CONFLICT blocks. */
2708 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2710 /* Nops get in the way while scheduling, so delete them
2711 now if register allocation has already been done. It
2712 is too risky to try to do this before register
2713 allocation, and there are unlikely to be very many
2714 nops then anyways. */
2715 if (reload_completed)
/* Turn the insn into a deleted note in place.  */
2717 PUT_CODE (insn, NOTE);
2718 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2719 NOTE_SOURCE_FILE (insn) = 0;
2724 /* Split insns here to get max fine-grain parallelism. */
2725 rtx first = PREV_INSN (insn);
2726 rtx last = try_split (PATTERN (insn), insn, 1);
2730 /* try_split returns the NOTE that INSN became. */
2731 PUT_CODE (insn, NOTE);
2732 NOTE_SOURCE_FILE (insn) = 0;
2733 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2735 /* ??? Coddle to md files that generate subregs in post-
2736 reload splitters instead of computing the proper
/* Walk the newly emitted insns and clean up their subreg operands.  */
2738 if (reload_completed && first != last)
2740 first = NEXT_INSN (first);
2744 cleanup_subreg_operands (first);
2747 first = NEXT_INSN (first);
2755 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2758 split_all_insns (upd_life)
/* Bitmap of basic blocks whose boundaries may have changed and so
   need their life info / structure recomputed.  */
2765 blocks = sbitmap_alloc (n_basic_blocks);
2766 sbitmap_zero (blocks);
2769 for (i = n_basic_blocks - 1; i >= 0; --i)
2771 basic_block bb = BASIC_BLOCK (i);
2774 for (insn = bb->head; insn ; insn = next)
2778 /* Can't use `next_real_insn' because that might go across
2779 CODE_LABELS and short-out basic blocks. */
2780 next = NEXT_INSN (insn);
2781 last = split_insn (insn);
2784 /* The split sequence may include barrier, but the
2785 BB boundary we are interested in will be set to previous
/* Skip trailing BARRIERs so the recorded boundary is an insn.  */
2788 while (GET_CODE (last) == BARRIER)
2789 last = PREV_INSN (last);
2790 SET_BIT (blocks, i);
/* Stop once we have processed the block's last insn.  */
2795 if (insn == bb->end)
2805 find_many_sub_basic_blocks (blocks);
2808 if (changed && upd_life)
2810 count_or_remove_death_notes (blocks, 1);
2811 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2813 #ifdef ENABLE_CHECKING
2814 verify_flow_info ();
2817 sbitmap_free (blocks);
2820 /* Same as split_all_insns, but do not expect CFG to be available.
2821 Used by machine depedent reorg passes. */
/* NOTE(review): elided extract -- the loop body that performs the
   actual split is missing after the lines below.  */
2824 split_all_insns_noflow ()
2828 for (insn = get_insns (); insn; insn = next)
/* Grab NEXT first; splitting may replace INSN in the chain.  */
2830 next = NEXT_INSN (insn);
2836 #ifdef HAVE_peephole2
/* Per-slot state for the peephole2 matcher's circular buffer of
   recently-seen insns.
   NOTE(review): the struct's members (the insn and its live_before
   regset, judging by uses below) are elided from this extract.  */
2837 struct peep2_insn_data
/* Circular buffer: one slot per insn a peephole2 may match, plus one
   extra slot holding end-of-block liveness.  */
2843 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2844 static int peep2_current;
2846 /* A non-insn marker indicating the last insn of the block.
2847 The live_before regset for this element is correct, indicating
2848 global_live_at_end for the block. */
2849 #define PEEP2_EOB pc_rtx
2851 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2852 does not exist. Used by the recognizer to find the next insn to match
2853 in a multi-insn pattern. */
/* NOTE(review): elided extract -- the function header and the abort()
   / return NULL_RTX statements are missing between the lines below.  */
2859 if (n >= MAX_INSNS_PER_PEEP2 + 1)
/* Index into the circular buffer, wrapping around its fixed size.  */
2863 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2864 n -= MAX_INSNS_PER_PEEP2 + 1;
/* The end-of-block marker means there is no such insn.  */
2866 if (peep2_insn_data[n].insn == PEEP2_EOB)
2868 return peep2_insn_data[n].insn;
2871 /* Return true if REGNO is dead before the Nth non-note insn
/* NOTE(review): elided extract -- parameter declarations, abort()
   calls and braces are missing between the numbered lines below.  */
2875 peep2_regno_dead_p (ofs, regno)
/* OFS is relative to the current position; reject out-of-range.  */
2879 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert to an absolute slot in the circular buffer.  */
2882 ofs += peep2_current;
2883 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2884 ofs -= MAX_INSNS_PER_PEEP2 + 1;
/* Slot must actually hold an insn for its liveness to be valid.  */
2886 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* Dead == not in the live-before set of that insn.  */
2889 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2892 /* Similarly for a REG. */
/* Return true if every hard reg occupied by REG is dead before the
   OFS'th non-note insn.
   NOTE(review): elided extract -- parameter declarations, abort()
   calls, the loop header over N and the final return are missing.  */
2895 peep2_reg_dead_p (ofs, reg)
2901 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert relative offset to an absolute circular-buffer slot.  */
2904 ofs += peep2_current;
2905 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2906 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2908 if (peep2_insn_data[ofs].insn == NULL_RTX)
/* A multi-word REG spans several hard regs; check each one.  */
2911 regno = REGNO (reg);
2912 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2914 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2919 /* Try to find a hard register of mode MODE, matching the register class in
2920 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2921 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2922 in which case the only condition is that the register must be available
2923 before CURRENT_INSN.
2924 Registers that already have bits set in REG_SET will not be considered.
2926 If an appropriate register is available, it will be returned and the
2927 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): elided extract -- parameter declarations for FROM/TO,
   abort()/return statements, the #else arm of REG_ALLOC_ORDER, the
   per-candidate `continue's and the final return NULL_RTX are missing
   between the numbered lines below.  */
2931 peep2_find_free_register (from, to, class_str, mode, reg_set)
2933 const char *class_str;
2934 enum machine_mode mode;
2935 HARD_REG_SET *reg_set;
/* SEARCH_OFS is static so successive calls rotate through the register
   file instead of always handing out the same register.  */
2937 static int search_ofs;
2938 enum reg_class class;
2942 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
/* Convert FROM/TO from relative offsets to circular-buffer slots.  */
2945 from += peep2_current;
2946 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2947 from -= MAX_INSNS_PER_PEEP2 + 1;
2948 to += peep2_current;
2949 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2950 to -= MAX_INSNS_PER_PEEP2 + 1;
2952 if (peep2_insn_data[from].insn == NULL_RTX)
/* Start with the registers live before FROM ...  */
2954 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
/* ... and accumulate liveness over every slot up to TO, so the chosen
   register is free across the whole span.  */
2958 HARD_REG_SET this_live;
2960 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2962 if (peep2_insn_data[from].insn == NULL_RTX)
2964 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2966 IOR_HARD_REG_SET (live, this_live);
/* Single-letter constraint: 'r' is GENERAL_REGS, anything else is
   decoded by the target's REG_CLASS_FROM_LETTER.  */
2968 class = (class_str[0] == 'r' ? GENERAL_REGS
2969 : REG_CLASS_FROM_LETTER (class_str[0]));
2971 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2973 int raw_regno, regno, success, j;
2975 /* Distribute the free registers as much as possible. */
2976 raw_regno = search_ofs + i;
2977 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2978 raw_regno -= FIRST_PSEUDO_REGISTER;
2979 #ifdef REG_ALLOC_ORDER
2980 regno = reg_alloc_order[raw_regno];
2985 /* Don't allocate fixed registers. */
2986 if (fixed_regs[regno])
2988 /* Make sure the register is of the right class. */
2989 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2991 /* And can support the mode we need. */
2992 if (! HARD_REGNO_MODE_OK (regno, mode))
2994 /* And that we don't create an extra save/restore. */
2995 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2997 /* And we don't clobber traceback for noreturn functions. */
2998 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2999 && (! reload_completed || frame_pointer_needed))
/* Every hard reg the mode occupies must be free in both REG_SET
   (already handed out) and LIVE (in use across the span).  */
3003 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3005 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3006 || TEST_HARD_REG_BIT (live, regno + j))
/* Success: claim the register(s) in REG_SET for the caller.  */
3014 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3015 SET_HARD_REG_BIT (*reg_set, regno + j);
3017 /* Start the next search with the next register. */
3018 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3020 search_ofs = raw_regno;
3022 return gen_rtx_REG (mode, regno);
3030 /* Perform the peephole2 optimization pass. */
/* Walk each basic block backwards, maintaining liveness via
   propagate_block, and replace insn windows that match a
   define_peephole2 pattern with the pattern's replacement sequence.
   NOTE(review): elided extract -- local declarations, many braces,
   `continue'/`break' statements, #else/#endif arms and several
   expressions are missing between the numbered lines below.  */
3033 peephole2_optimize (dump_file)
3034 FILE *dump_file ATTRIBUTE_UNUSED;
3036 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3040 #ifdef HAVE_conditional_execution
3044 bool do_cleanup_cfg = false;
3045 bool do_rebuild_jump_labels = false;
3047 /* Initialize the regsets we're going to use. */
3048 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3049 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3050 live = INITIALIZE_REG_SET (rs_heads[i]);
3052 #ifdef HAVE_conditional_execution
/* With conditional execution, liveness cannot be patched up locally;
   track modified blocks in BLOCKS and redo life info at the end.  */
3053 blocks = sbitmap_alloc (n_basic_blocks);
3054 sbitmap_zero (blocks);
3057 count_or_remove_death_notes (NULL, 1);
3060 for (b = n_basic_blocks - 1; b >= 0; --b)
3062 basic_block bb = BASIC_BLOCK (b);
3063 struct propagate_block_info *pbi;
3065 /* Indicate that all slots except the last holds invalid data. */
3066 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3067 peep2_insn_data[i].insn = NULL_RTX;
3069 /* Indicate that the last slot contains live_after data. */
3070 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3071 peep2_current = MAX_INSNS_PER_PEEP2;
3073 /* Start up propagation. */
3074 COPY_REG_SET (live, bb->global_live_at_end);
3075 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3077 #ifdef HAVE_conditional_execution
3078 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3080 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
/* Scan the block from its last insn backwards.  */
3083 for (insn = bb->end; ; insn = prev)
3085 prev = PREV_INSN (insn);
3088 rtx try, before_try, x;
3092 /* Record this insn. */
3093 if (--peep2_current < 0)
3094 peep2_current = MAX_INSNS_PER_PEEP2;
3095 peep2_insn_data[peep2_current].insn = insn;
3096 propagate_one_insn (pbi, insn);
3097 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3099 /* Match the peephole. */
/* TRY is the replacement sequence (or single insn) generated by the
   matched define_peephole2; MATCH_LEN is how many buffered insns the
   pattern consumed.  */
3100 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3103 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3104 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3105 cfg-related call notes. */
3106 for (i = 0; i <= match_len; ++i)
3109 rtx old_insn, new_insn, note;
3111 j = i + peep2_current;
3112 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3113 j -= MAX_INSNS_PER_PEEP2 + 1;
3114 old_insn = peep2_insn_data[j].insn;
3115 if (GET_CODE (old_insn) != CALL_INSN)
/* Locate the (single) CALL_INSN in the replacement so the old
   call's usage and notes can be transferred onto it.  */
3118 new_insn = NULL_RTX;
3119 if (GET_CODE (try) == SEQUENCE)
3120 for (k = XVECLEN (try, 0) - 1; k >= 0; k--)
3122 rtx x = XVECEXP (try, 0, k);
3123 if (GET_CODE (x) == CALL_INSN)
3129 else if (GET_CODE (try) == CALL_INSN)
3134 CALL_INSN_FUNCTION_USAGE (new_insn)
3135 = CALL_INSN_FUNCTION_USAGE (old_insn);
3137 for (note = REG_NOTES (old_insn);
3139 note = XEXP (note, 1))
3140 switch (REG_NOTE_KIND (note))
3144 case REG_ALWAYS_RETURN:
3145 REG_NOTES (new_insn)
3146 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3148 REG_NOTES (new_insn));
3150 /* Discard all other reg notes. */
3154 /* Croak if there is another call in the sequence. */
3155 while (++i <= match_len)
3157 j = i + peep2_current;
3158 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3159 j -= MAX_INSNS_PER_PEEP2 + 1;
3160 old_insn = peep2_insn_data[j].insn;
3161 if (GET_CODE (old_insn) == CALL_INSN)
/* I now indexes the last (earliest-emitted) matched insn; the
   replacement is emitted after it and the old span deleted.  */
3167 i = match_len + peep2_current;
3168 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3169 i -= MAX_INSNS_PER_PEEP2 + 1;
3171 note = find_reg_note (peep2_insn_data[i].insn,
3172 REG_EH_REGION, NULL_RTX);
3174 /* Replace the old sequence with the new. */
3175 try = emit_insn_after (try, peep2_insn_data[i].insn);
3176 before_try = PREV_INSN (insn);
3177 delete_insn_chain (insn, peep2_insn_data[i].insn);
3179 /* Re-insert the EH_REGION notes. */
3184 for (eh_edge = bb->succ; eh_edge
3185 ; eh_edge = eh_edge->succ_next)
3186 if (eh_edge->flags & EDGE_EH)
/* Mark every potentially-throwing new insn with the saved
   EH region note.  */
3189 for (x = try ; x != before_try ; x = PREV_INSN (x))
3190 if (GET_CODE (x) == CALL_INSN
3191 || (flag_non_call_exceptions
3192 && may_trap_p (PATTERN (x))
3193 && !find_reg_note (x, REG_EH_REGION, NULL)))
3196 = gen_rtx_EXPR_LIST (REG_EH_REGION,
/* A throwing insn that is no longer last in the block forces a
   block split with a new EH edge.  */
3200 if (x != bb->end && eh_edge)
3205 nfte = split_block (bb, x);
3206 flags = EDGE_EH | EDGE_ABNORMAL;
3207 if (GET_CODE (x) == CALL_INSN)
3208 flags |= EDGE_ABNORMAL_CALL;
3209 nehe = make_edge (nfte->src, eh_edge->dest,
3212 nehe->probability = eh_edge->probability;
3214 = REG_BR_PROB_BASE - nehe->probability;
3216 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3217 #ifdef HAVE_conditional_execution
3218 SET_BIT (blocks, nfte->dest->index);
3226 /* Converting possibly trapping insn to non-trapping is
3227 possible. Zap dummy outgoing edges. */
3228 do_cleanup_cfg |= purge_dead_edges (bb);
3231 #ifdef HAVE_conditional_execution
3232 /* With conditional execution, we cannot back up the
3233 live information so easily, since the conditional
3234 death data structures are not so self-contained.
3235 So record that we've made a modification to this
3236 block and update life information at the end. */
3237 SET_BIT (blocks, b);
/* Invalidate the whole buffer: the old window is gone.  */
3240 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3241 peep2_insn_data[i].insn = NULL_RTX;
3242 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3244 /* Back up lifetime information past the end of the
3245 newly created sequence. */
3246 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3248 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3250 /* Update life information for the new sequence. */
/* Re-run propagation over the freshly emitted insns so the
   buffer's live_before data is valid again.  */
3257 i = MAX_INSNS_PER_PEEP2;
3258 peep2_insn_data[i].insn = x;
3259 propagate_one_insn (pbi, x);
3260 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3266 /* ??? Should verify that LIVE now matches what we
3267 had before the new sequence. */
3272 /* If we generated a jump instruction, it won't have
3273 JUMP_LABEL set. Recompute after we're done. */
3274 for (x = try; x != before_try; x = PREV_INSN (x))
3275 if (GET_CODE (x) == JUMP_INSN)
3277 do_rebuild_jump_labels = true;
3283 if (insn == bb->head)
3287 free_propagate_block_info (pbi);
/* Release the regsets allocated at entry.  */
3290 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3291 FREE_REG_SET (peep2_insn_data[i].live_before);
3292 FREE_REG_SET (live);
3294 if (do_rebuild_jump_labels)
3295 rebuild_jump_labels (get_insns ());
3297 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3298 we've changed global life since exception handlers are no longer
3303 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3305 #ifdef HAVE_conditional_execution
/* Redo death notes and local life info for the blocks modified
   under conditional execution.  */
3308 count_or_remove_death_notes (blocks, 1);
3309 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3311 sbitmap_free (blocks);
3314 #endif /* HAVE_peephole2 */