/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "coretypes.h"
#include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
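
/* Worked example (added for illustration): with 8-bit quantities,
   0x70 + 0x70 = 0xE0, i.e. two positive operands produce a negative
   sum.  Here ~(a ^ b) has the sign bit set (the operands' signs agree)
   and (a ^ sum) has the sign bit set (the sign changed), so their AND
   is negative and the macro reports overflow.  When the operands'
   signs differ, overflow is impossible and ~(a ^ b) has a clear sign
   bit, so the macro yields zero.  */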
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
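
/* For illustration (added; assumes a 64-bit HOST_WIDE_INT, so
   BASE == 2^32): the value 0x123456789ABCDEF0 has
   LOWPART == 0x9ABCDEF0 and HIGHPART == 0x12345678, and is recovered
   as LOWPART + HIGHPART * BASE.  Storing only half a host word per
   digit leaves headroom so the carries and partial products in the
   routines below cannot overflow a host word.  */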
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
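
/* Illustrative round trip (added): encode and decode are exact
   inverses, so

     HOST_WIDE_INT w[4];
     encode (w, low, hi);
     decode (w, &low2, &hi2);

   leaves low2 == low and hi2 == hi.  This 4-word form is what the
   multiplication and division routines below operate on, one
   half-word digit at a time.  */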
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs
        or OVERFLOWABLE is <0 and any overflow occurs
   We set TREE_CONSTANT_OVERFLOWED if,
        CONST_OVERFLOWED is nonzero
        or we set TREE_OVERFLOWED.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
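
/* Example (added for illustration): forcing the value 0x1FF into an
   8-bit unsigned type clears the bits above bit 7, yielding 0xFF;
   forcing it into an 8-bit signed type also sign-extends bit 7,
   yielding -1.  Since the value changed in both cases, a new node is
   returned, and with OVERFLOWABLE > 0 only the signed (sign-extended)
   case would set TREE_OVERFLOW.  */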
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
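
/* Why the check works (added note): negating a two's complement value
   overflows only for the most negative value.  When L1 == 0, the low
   word of the negation is 0 and the high word is -H1; H1 and -H1 both
   have the sign bit set exactly when H1 is the minimum HOST_WIDE_INT,
   hence the (*hv & h1) < 0 test.  When L1 != 0, the borrow out of the
   low word makes the high word ~H1, and overflow is impossible.  */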
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
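
/* How the signed overflow test works (added note): the digit loop
   above computes an unsigned 8-digit product, so for each operand that
   was actually negative the top half is too large by the other operand
   (times 2^(2 * HOST_BITS_PER_WIDE_INT)); adding in the negated
   operand corrects it.  After correction the top half must be all zero
   bits (nonnegative result) or all one bits (negative result), in
   agreement with the sign of the low half; anything else means the
   product overflowed a doubleword.  */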
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
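      /* Added note: the double shift above avoids shifting L1 right by
         a full HOST_BITS_PER_WIDE_INT when COUNT == 0, which C leaves
         undefined; splitting it into two smaller shifts safely yields
         zero in that case.  */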
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];   /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num); /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Insure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {               /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;                /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);
 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
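
/* Worked rounding example (added for illustration): dividing -7 by 2
   gives a truncated quotient of -3 with remainder -1.  FLOOR_DIV_EXPR
   adjusts the negative inexact quotient down to -4; CEIL_DIV_EXPR
   keeps -3; ROUND_DIV_EXPR compares 2 * |rem| = 2 with |den| = 2 and,
   since 2 * |rem| >= |den|, rounds away from zero to -4.  The true
   remainder is then recomputed so that num == quo * den + rem always
   holds.  */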
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
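
/* Usage sketch (added; illustrative, not from the original source):

     fold_defer_overflow_warnings ();
     t = fold (expr);
     fold_undefer_overflow_warnings (result_is_used, stmt, 0);

   Between the two calls, any fold_overflow_warning is recorded rather
   than issued, and the undefer call decides whether to emit it.  */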
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
    locus = input_location;
  else
    locus = EXPR_LOCATION (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, "%s", gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
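
/* Example (added): for a 32-bit signed type the only value whose
   negation overflows is INT_MIN = -2^31, whose bit pattern is exactly
   1 << (prec - 1); every other value compares unequal above and is
   safe to negate.  */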
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
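      /* Why this is safe (added note): when the shift count is
         precision - 1, an arithmetic right shift yields only 0 or -1
         and a logical one yields only 0 or 1, so the negation of one
         equals the other regardless of the operand.  */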
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (tem == NULL_TREE)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
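
/* Example (added for illustration): splitting IN = x - 5 with
   CODE == PLUS_EXPR returns the variable part x, leaves *CONP null,
   and stores the literal 5 in *MINUS_LITP, since the literal was
   subtracted.  Re-associating the pieces with associate_trees below
   reconstructs an expression equal to IN.  */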
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */
    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
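
/* For reference (added): the RDIV_EXPR case above implements the
   textbook formula

     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   computing the shared denominator magsquared once.  */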
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
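
/* Example (added): with unsigned sizetype operands 4 and 12,
   size_diffop computes 12 - 4 = 8 in the unsigned type, converts that
   to the signed ssizetype, and subtracts it from zero, yielding -8
   without relying on wrap-around in an unsigned subtraction.  */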
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
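  /* Concretely (added note): under these rules (int) 1e30 yields
     INT_MAX, (int) -1e30 yields INT_MIN, and (int) NaN yields 0, in
     each case with the overflow flag set on the resulting constant.  */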
  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
2173 if (TREE_CODE (arg) == INTEGER_CST)
2175 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2176 if (tem != NULL_TREE)
2179 else if (TREE_CODE (arg) == REAL_CST)
2181 tem = fold_convert_const (NOP_EXPR, type, arg);
2182 if (tem != NULL_TREE)
2186 switch (TREE_CODE (orig))
2189 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2190 case POINTER_TYPE: case REFERENCE_TYPE:
2191 return fold_build1 (FLOAT_EXPR, type, arg);
2194 return fold_build1 (NOP_EXPR, type, arg);
2197 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2198 return fold_convert (type, tem);
2205 switch (TREE_CODE (orig))
2208 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2209 case POINTER_TYPE: case REFERENCE_TYPE:
2211 return build2 (COMPLEX_EXPR, type,
2212 fold_convert (TREE_TYPE (type), arg),
2213 fold_convert (TREE_TYPE (type), integer_zero_node));
2218 if (TREE_CODE (arg) == COMPLEX_EXPR)
2220 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2221 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2222 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2225 arg = save_expr (arg);
2226 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2227 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2228 rpart = fold_convert (TREE_TYPE (type), rpart);
2229 ipart = fold_convert (TREE_TYPE (type), ipart);
2230 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2238 if (integer_zerop (arg))
2239 return build_zero_vector (type);
2240 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2241 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2242 || TREE_CODE (orig) == VECTOR_TYPE);
2243 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2246 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
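/* Usage sketch (illustrative only):

     tree t = fold_convert (double_type_node,
                            build_int_cst (integer_type_node, 3));

   yields the REAL_CST 3.0, because the REAL_TYPE case above routes an
   INTEGER_CST argument through fold_convert_const with FLOAT_EXPR.
   Conversions between variants of one type collapse to a NOP_EXPR.  */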
2253 /* Return false if expr can be assumed not to be an lvalue, true
2254 otherwise. */
2257 maybe_lvalue_p (tree x)
2259 /* We only need to wrap lvalue tree codes. */
2260 switch (TREE_CODE (x))
2271 case ALIGN_INDIRECT_REF:
2272 case MISALIGNED_INDIRECT_REF:
2274 case ARRAY_RANGE_REF:
2280 case PREINCREMENT_EXPR:
2281 case PREDECREMENT_EXPR:
2283 case TRY_CATCH_EXPR:
2284 case WITH_CLEANUP_EXPR:
2295 /* Assume the worst for front-end tree codes. */
2296 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2304 /* Return an expr equal to X but certainly not valid as an lvalue. */
2309 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2310 us. */
2314 if (! maybe_lvalue_p (x))
2316 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2319 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2320 Zero means allow extended lvalues. */
2322 int pedantic_lvalues;
2324 /* When pedantic, return an expr equal to X but certainly not valid as a
2325 pedantic lvalue. Otherwise, return X. */
2328 pedantic_non_lvalue (tree x)
2330 if (pedantic_lvalues)
2331 return non_lvalue (x);
2336 /* Given a tree comparison code, return the code that is the logical inverse
2337 of the given code. It is not safe to do this for floating-point
2338 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive the flag
2339 HONOR_NANS: if reversing the comparison is unsafe, return ERROR_MARK. */
2342 invert_tree_comparison (enum tree_code code, bool honor_nans)
2344 if (honor_nans && flag_trapping_math)
2354 return honor_nans ? UNLE_EXPR : LE_EXPR;
2356 return honor_nans ? UNLT_EXPR : LT_EXPR;
2358 return honor_nans ? UNGE_EXPR : GE_EXPR;
2360 return honor_nans ? UNGT_EXPR : GT_EXPR;
2374 return UNORDERED_EXPR;
2375 case UNORDERED_EXPR:
2376 return ORDERED_EXPR;
2382 /* Similar, but return the comparison that results if the operands are
2383 swapped. This is safe for floating-point. */
2386 swap_tree_comparison (enum tree_code code)
2393 case UNORDERED_EXPR:
2419 /* Convert a comparison tree code from an enum tree_code representation
2420 into a compcode bit-based encoding. This function is the inverse of
2421 compcode_to_comparison. */
2423 static enum comparison_code
2424 comparison_to_compcode (enum tree_code code)
2441 return COMPCODE_ORD;
2442 case UNORDERED_EXPR:
2443 return COMPCODE_UNORD;
2445 return COMPCODE_UNLT;
2447 return COMPCODE_UNEQ;
2449 return COMPCODE_UNLE;
2451 return COMPCODE_UNGT;
2453 return COMPCODE_LTGT;
2455 return COMPCODE_UNGE;
2461 /* Convert a compcode bit-based encoding of a comparison operator back
2462 to GCC's enum tree_code representation. This function is the
2463 inverse of comparison_to_compcode. */
2465 static enum tree_code
2466 compcode_to_comparison (enum comparison_code code)
2483 return ORDERED_EXPR;
2484 case COMPCODE_UNORD:
2485 return UNORDERED_EXPR;
2503 /* Return a tree for the comparison which is the combination of
2504 doing the AND or OR (depending on CODE) of the two operations LCODE
2505 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2506 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2507 if this makes the transformation invalid. */
2510 combine_comparisons (enum tree_code code, enum tree_code lcode,
2511 enum tree_code rcode, tree truth_type,
2512 tree ll_arg, tree lr_arg)
2514 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2515 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2516 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2517 enum comparison_code compcode;
2521 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2522 compcode = lcompcode & rcompcode;
2525 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2526 compcode = lcompcode | rcompcode;
2535 /* Eliminate unordered comparisons, as well as LTGT and ORD
2536 which are not used unless the mode has NaNs. */
2537 compcode &= ~COMPCODE_UNORD;
2538 if (compcode == COMPCODE_LTGT)
2539 compcode = COMPCODE_NE;
2540 else if (compcode == COMPCODE_ORD)
2541 compcode = COMPCODE_TRUE;
2543 else if (flag_trapping_math)
2545 /* Check that the original operation and the optimized ones will trap
2546 under the same condition. */
2547 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2548 && (lcompcode != COMPCODE_EQ)
2549 && (lcompcode != COMPCODE_ORD);
2550 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2551 && (rcompcode != COMPCODE_EQ)
2552 && (rcompcode != COMPCODE_ORD);
2553 bool trap = (compcode & COMPCODE_UNORD) == 0
2554 && (compcode != COMPCODE_EQ)
2555 && (compcode != COMPCODE_ORD);
2557 /* In a short-circuited boolean expression the LHS might be
2558 such that the RHS, if evaluated, will never trap. For
2559 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2560 if neither x nor y is NaN. (This is a mixed blessing: for
2561 example, the expression above will never trap, hence
2562 optimizing it to x < y would be invalid). */
2563 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2564 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2567 /* If the comparison was short-circuited, and only the RHS
2568 trapped, we may now generate a spurious trap. */
2570 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2573 /* If we changed the conditions that cause a trap, we lose. */
2574 if ((ltrap || rtrap) != trap)
2578 if (compcode == COMPCODE_TRUE)
2579 return constant_boolean_node (true, truth_type);
2580 else if (compcode == COMPCODE_FALSE)
2581 return constant_boolean_node (false, truth_type);
2583 return fold_build2 (compcode_to_comparison (compcode),
2584 truth_type, ll_arg, lr_arg);
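/* A worked example (illustrative, relying on the bit encoding of enum
   comparison_code declared near the top of this file): COMPCODE_LT and
   COMPCODE_EQ combine under TRUTH_ORIF_EXPR by a plain bitwise OR into
   COMPCODE_LE, so

     a < b || a == b   folds to   a <= b

   and dually COMPCODE_LE & COMPCODE_GE is COMPCODE_EQ, so
   (a <= b) && (a >= b) folds to a == b.  For floating-point operands
   the trap bookkeeping above may return NULL_TREE instead.  */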
2587 /* Return nonzero if CODE is a tree code that represents a truth value. */
2590 truth_value_p (enum tree_code code)
2592 return (TREE_CODE_CLASS (code) == tcc_comparison
2593 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2594 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2595 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2598 /* Return nonzero if two operands (typically of the same tree node)
2599 are necessarily equal. If either argument has side-effects this
2600 function returns zero. FLAGS modifies behavior as follows:
2602 If OEP_ONLY_CONST is set, only return nonzero for constants.
2603 This function tests whether the operands are indistinguishable;
2604 it does not test whether they are equal using C's == operation.
2605 The distinction is important for IEEE floating point, because
2606 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2607 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2609 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2610 even though it may hold multiple values during a function.
2611 This is because a GCC tree node guarantees that nothing else is
2612 executed between the evaluation of its "operands" (which may often
2613 be evaluated in arbitrary order). Hence if the operands themselves
2614 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2615 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2616 unset means assuming isochronic (or instantaneous) tree equivalence.
2617 Unless comparing arbitrary expression trees, such as from different
2618 statements, this flag can usually be left unset.
2620 If OEP_PURE_SAME is set, then pure functions with identical arguments
2621 are considered the same. It is used when the caller has other ways
2622 to ensure that global memory is unchanged in between. */
2625 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2627 /* If either is ERROR_MARK, they aren't equal. */
2628 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2631 /* If the two types don't have the same signedness, then we can't consider
2632 them equal. We must check this before the STRIP_NOPS calls
2633 because they may change the signedness of the arguments. */
2634 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2637 /* If the two types don't have the same precision, then it is not safe
2638 to strip NOPs. */
2639 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2645 /* In case both args are comparisons but with different comparison
2646 code, try to swap the comparison operands of one arg to produce
2647 a match and compare that variant. */
2648 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2649 && COMPARISON_CLASS_P (arg0)
2650 && COMPARISON_CLASS_P (arg1))
2652 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2654 if (TREE_CODE (arg0) == swap_code)
2655 return operand_equal_p (TREE_OPERAND (arg0, 0),
2656 TREE_OPERAND (arg1, 1), flags)
2657 && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 TREE_OPERAND (arg1, 0), flags);
2661 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2662 /* This is needed for conversions and for COMPONENT_REF.
2663 Might as well play it safe and always test this. */
2664 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2665 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2666 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2669 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2670 We don't care about side effects in that case because the SAVE_EXPR
2671 takes care of that for us. In all other cases, two expressions are
2672 equal if they have no side effects. If we have two identical
2673 expressions with side effects that should be treated the same due
2674 to the only side effects being identical SAVE_EXPR's, that will
2675 be detected in the recursive calls below. */
2676 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2677 && (TREE_CODE (arg0) == SAVE_EXPR
2678 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2681 /* Next handle constant cases, those for which we can return 1 even
2682 if ONLY_CONST is set. */
2683 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2684 switch (TREE_CODE (arg0))
2687 return (! TREE_CONSTANT_OVERFLOW (arg0)
2688 && ! TREE_CONSTANT_OVERFLOW (arg1)
2689 && tree_int_cst_equal (arg0, arg1));
2692 return (! TREE_CONSTANT_OVERFLOW (arg0)
2693 && ! TREE_CONSTANT_OVERFLOW (arg1)
2694 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2695 TREE_REAL_CST (arg1)));
2701 if (TREE_CONSTANT_OVERFLOW (arg0)
2702 || TREE_CONSTANT_OVERFLOW (arg1))
2705 v1 = TREE_VECTOR_CST_ELTS (arg0);
2706 v2 = TREE_VECTOR_CST_ELTS (arg1);
2709 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2712 v1 = TREE_CHAIN (v1);
2713 v2 = TREE_CHAIN (v2);
2720 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2722 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2726 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2727 && ! memcmp (TREE_STRING_POINTER (arg0),
2728 TREE_STRING_POINTER (arg1),
2729 TREE_STRING_LENGTH (arg0)));
2732 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2738 if (flags & OEP_ONLY_CONST)
2741 /* Define macros to test an operand from arg0 and arg1 for equality and a
2742 variant that allows null and views null as being different from any
2743 non-null value. In the latter case, if either is null, then both
2744 must be; otherwise, do the normal comparison. */
2745 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2746 TREE_OPERAND (arg1, N), flags)
2748 #define OP_SAME_WITH_NULL(N) \
2749 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2750 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2752 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2755 /* Two conversions are equal only if signedness and modes match. */
2756 switch (TREE_CODE (arg0))
2761 case FIX_TRUNC_EXPR:
2762 case FIX_FLOOR_EXPR:
2763 case FIX_ROUND_EXPR:
2764 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2765 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2775 case tcc_comparison:
2777 if (OP_SAME (0) && OP_SAME (1))
2780 /* For commutative ops, allow the other order. */
2781 return (commutative_tree_code (TREE_CODE (arg0))
2782 && operand_equal_p (TREE_OPERAND (arg0, 0),
2783 TREE_OPERAND (arg1, 1), flags)
2784 && operand_equal_p (TREE_OPERAND (arg0, 1),
2785 TREE_OPERAND (arg1, 0), flags));
2788 /* If either of the pointer (or reference) expressions we are
2789 dereferencing contains a side effect, these cannot be equal. */
2790 if (TREE_SIDE_EFFECTS (arg0)
2791 || TREE_SIDE_EFFECTS (arg1))
2794 switch (TREE_CODE (arg0))
2797 case ALIGN_INDIRECT_REF:
2798 case MISALIGNED_INDIRECT_REF:
2804 case ARRAY_RANGE_REF:
2805 /* Operands 2 and 3 may be null.
2806 Compare the array index by value first if it is constant, as we
2807 may have different types but the same value here. */
2809 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2810 TREE_OPERAND (arg1, 1))
2812 && OP_SAME_WITH_NULL (2)
2813 && OP_SAME_WITH_NULL (3));
2816 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2817 may be NULL when we're called to compare MEM_EXPRs. */
2818 return OP_SAME_WITH_NULL (0)
2820 && OP_SAME_WITH_NULL (2);
2823 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2829 case tcc_expression:
2830 switch (TREE_CODE (arg0))
2833 case TRUTH_NOT_EXPR:
2836 case TRUTH_ANDIF_EXPR:
2837 case TRUTH_ORIF_EXPR:
2838 return OP_SAME (0) && OP_SAME (1);
2840 case TRUTH_AND_EXPR:
2842 case TRUTH_XOR_EXPR:
2843 if (OP_SAME (0) && OP_SAME (1))
2846 /* Otherwise, take into account that this is a commutative operation. */
2847 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2848 TREE_OPERAND (arg1, 1), flags)
2849 && operand_equal_p (TREE_OPERAND (arg0, 1),
2850 TREE_OPERAND (arg1, 0), flags));
2853 /* If the CALL_EXPRs call different functions, then they
2854 clearly cannot be equal. */
2859 unsigned int cef = call_expr_flags (arg0);
2860 if (flags & OEP_PURE_SAME)
2861 cef &= ECF_CONST | ECF_PURE;
2868 /* Now see if all the arguments are the same. operand_equal_p
2869 does not handle TREE_LIST, so we walk the operands here
2870 feeding them to operand_equal_p. */
2871 arg0 = TREE_OPERAND (arg0, 1);
2872 arg1 = TREE_OPERAND (arg1, 1);
2873 while (arg0 && arg1)
2875 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2879 arg0 = TREE_CHAIN (arg0);
2880 arg1 = TREE_CHAIN (arg1);
2883 /* If we get here and both argument lists are exhausted
2884 then the CALL_EXPRs are equal. */
2885 return ! (arg0 || arg1);
2891 case tcc_declaration:
2892 /* Consider __builtin_sqrt equal to sqrt. */
2893 return (TREE_CODE (arg0) == FUNCTION_DECL
2894 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2895 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2896 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2903 #undef OP_SAME_WITH_NULL
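/* Illustrative consequences of the REAL_CST case above (added note,
   not in the original source): equality is decided with
   REAL_VALUES_IDENTICAL rather than numeric comparison, so the
   REAL_CSTs 0.0 and -0.0 are *not* equal operands even though
   0.0 == -0.0 evaluates to true, while two identical NaN constants
   *are* equal operands even though NaN != NaN at run time.  */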
2906 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2907 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2909 When in doubt, return 0. */
2912 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2914 int unsignedp1, unsignedpo;
2915 tree primarg0, primarg1, primother;
2916 unsigned int correct_width;
2918 if (operand_equal_p (arg0, arg1, 0))
2921 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2922 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2925 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2926 and see if the inner values are the same. This removes any
2927 signedness comparison, which doesn't matter here. */
2928 primarg0 = arg0, primarg1 = arg1;
2929 STRIP_NOPS (primarg0);
2930 STRIP_NOPS (primarg1);
2931 if (operand_equal_p (primarg0, primarg1, 0))
2934 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2935 actual comparison operand, ARG0.
2937 First throw away any conversions to wider types
2938 already present in the operands. */
2940 primarg1 = get_narrower (arg1, &unsignedp1);
2941 primother = get_narrower (other, &unsignedpo);
2943 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2944 if (unsignedp1 == unsignedpo
2945 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2946 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2948 tree type = TREE_TYPE (arg0);
2950 /* Make sure the shorter operand is extended the right way
2951 to match the longer operand. */
2952 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2953 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2955 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2962 /* See if ARG is an expression that is either a comparison or is performing
2963 arithmetic on comparisons. The comparisons must only be comparing
2964 two different values, which will be stored in *CVAL1 and *CVAL2; if
2965 they are nonzero it means that some operands have already been found.
2966 No variables may be used anywhere else in the expression except in the
2967 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2968 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2970 If this is true, return 1. Otherwise, return zero. */
2973 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2975 enum tree_code code = TREE_CODE (arg);
2976 enum tree_code_class class = TREE_CODE_CLASS (code);
2978 /* We can handle some of the tcc_expression cases here. */
2979 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2981 else if (class == tcc_expression
2982 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2983 || code == COMPOUND_EXPR))
2986 else if (class == tcc_expression && code == SAVE_EXPR
2987 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2989 /* If we've already found a CVAL1 or CVAL2, this expression is
2990 too complex to handle. */
2991 if (*cval1 || *cval2)
3001 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3004 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3005 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3006 cval1, cval2, save_p));
3011 case tcc_expression:
3012 if (code == COND_EXPR)
3013 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3014 cval1, cval2, save_p)
3015 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3016 cval1, cval2, save_p)
3017 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3018 cval1, cval2, save_p));
3021 case tcc_comparison:
3022 /* First see if we can handle the first operand, then the second. For
3023 the second operand, we know *CVAL1 can't be zero. It must be that
3024 one side of the comparison is each of the values; test for the
3025 case where this isn't true by failing if the two operands
3026 are the same. */
3028 if (operand_equal_p (TREE_OPERAND (arg, 0),
3029 TREE_OPERAND (arg, 1), 0))
3033 *cval1 = TREE_OPERAND (arg, 0);
3034 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3036 else if (*cval2 == 0)
3037 *cval2 = TREE_OPERAND (arg, 0);
3038 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3043 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3045 else if (*cval2 == 0)
3046 *cval2 = TREE_OPERAND (arg, 1);
3047 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3059 /* ARG is a tree that is known to contain just arithmetic operations and
3060 comparisons. Evaluate the operations in the tree substituting NEW0 for
3061 any occurrence of OLD0 as an operand of a comparison and likewise for
3062 NEW1 and OLD1. */
3065 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3067 tree type = TREE_TYPE (arg);
3068 enum tree_code code = TREE_CODE (arg);
3069 enum tree_code_class class = TREE_CODE_CLASS (code);
3071 /* We can handle some of the tcc_expression cases here. */
3072 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3074 else if (class == tcc_expression
3075 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3081 return fold_build1 (code, type,
3082 eval_subst (TREE_OPERAND (arg, 0),
3083 old0, new0, old1, new1));
3086 return fold_build2 (code, type,
3087 eval_subst (TREE_OPERAND (arg, 0),
3088 old0, new0, old1, new1),
3089 eval_subst (TREE_OPERAND (arg, 1),
3090 old0, new0, old1, new1));
3092 case tcc_expression:
3096 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3099 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3102 return fold_build3 (code, type,
3103 eval_subst (TREE_OPERAND (arg, 0),
3104 old0, new0, old1, new1),
3105 eval_subst (TREE_OPERAND (arg, 1),
3106 old0, new0, old1, new1),
3107 eval_subst (TREE_OPERAND (arg, 2),
3108 old0, new0, old1, new1));
3112 /* Fall through - ??? */
3114 case tcc_comparison:
3116 tree arg0 = TREE_OPERAND (arg, 0);
3117 tree arg1 = TREE_OPERAND (arg, 1);
3119 /* We need to check both for exact equality and tree equality. The
3120 former will be true if the operand has a side-effect. In that
3121 case, we know the operand occurred exactly once. */
3123 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3125 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3128 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3130 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3133 return fold_build2 (code, type, arg0, arg1);
3141 /* Return a tree for the case when the result of an expression is RESULT
3142 converted to TYPE and OMITTED was previously an operand of the expression
3143 but is now not needed (e.g., we folded OMITTED * 0).
3145 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3146 the conversion of RESULT to TYPE. */
3149 omit_one_operand (tree type, tree result, tree omitted)
3151 tree t = fold_convert (type, result);
3153 if (TREE_SIDE_EFFECTS (omitted))
3154 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3156 return non_lvalue (t);
3159 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3162 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3164 tree t = fold_convert (type, result);
3166 if (TREE_SIDE_EFFECTS (omitted))
3167 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3169 return pedantic_non_lvalue (t);
3172 /* Return a tree for the case when the result of an expression is RESULT
3173 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3174 of the expression but are now not needed.
3176 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3177 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3178 evaluated before OMITTED2. Otherwise, if neither has side effects,
3179 just do the conversion of RESULT to TYPE. */
3182 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3184 tree t = fold_convert (type, result);
3186 if (TREE_SIDE_EFFECTS (omitted2))
3187 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3188 if (TREE_SIDE_EFFECTS (omitted1))
3189 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3191 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
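/* Usage sketch (illustrative; call_to_f is a hypothetical CALL_EXPR):
   when folding "f () * 0" the call cannot simply be dropped, so a
   caller writes

     omit_one_operand (type, integer_zero_node, call_to_f);

   producing the COMPOUND_EXPR (f (), 0), which keeps the side effect
   but evaluates to zero.  With no side effects in the omitted operand,
   only the converted RESULT is returned.  */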
3195 /* Return a simplified tree node for the truth-negation of ARG. This
3196 never alters ARG itself. We assume that ARG is an operation that
3197 returns a truth value (0 or 1).
3199 FIXME: one would think we would fold the result, but it causes
3200 problems with the dominator optimizer. */
3203 fold_truth_not_expr (tree arg)
3205 tree type = TREE_TYPE (arg);
3206 enum tree_code code = TREE_CODE (arg);
3208 /* If this is a comparison, we can simply invert it, except for
3209 floating-point non-equality comparisons, in which case we just
3210 enclose a TRUTH_NOT_EXPR around what we have. */
3212 if (TREE_CODE_CLASS (code) == tcc_comparison)
3214 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3215 if (FLOAT_TYPE_P (op_type)
3216 && flag_trapping_math
3217 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3218 && code != NE_EXPR && code != EQ_EXPR)
3222 code = invert_tree_comparison (code,
3223 HONOR_NANS (TYPE_MODE (op_type)));
3224 if (code == ERROR_MARK)
3227 return build2 (code, type,
3228 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3235 return constant_boolean_node (integer_zerop (arg), type);
3237 case TRUTH_AND_EXPR:
3238 return build2 (TRUTH_OR_EXPR, type,
3239 invert_truthvalue (TREE_OPERAND (arg, 0)),
3240 invert_truthvalue (TREE_OPERAND (arg, 1)));
3243 return build2 (TRUTH_AND_EXPR, type,
3244 invert_truthvalue (TREE_OPERAND (arg, 0)),
3245 invert_truthvalue (TREE_OPERAND (arg, 1)));
3247 case TRUTH_XOR_EXPR:
3248 /* Here we can invert either operand. We invert the first operand
3249 unless the second operand is a TRUTH_NOT_EXPR in which case our
3250 result is the XOR of the first operand with the inside of the
3251 negation of the second operand. */
3253 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3254 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3255 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3257 return build2 (TRUTH_XOR_EXPR, type,
3258 invert_truthvalue (TREE_OPERAND (arg, 0)),
3259 TREE_OPERAND (arg, 1));
3261 case TRUTH_ANDIF_EXPR:
3262 return build2 (TRUTH_ORIF_EXPR, type,
3263 invert_truthvalue (TREE_OPERAND (arg, 0)),
3264 invert_truthvalue (TREE_OPERAND (arg, 1)));
3266 case TRUTH_ORIF_EXPR:
3267 return build2 (TRUTH_ANDIF_EXPR, type,
3268 invert_truthvalue (TREE_OPERAND (arg, 0)),
3269 invert_truthvalue (TREE_OPERAND (arg, 1)));
3271 case TRUTH_NOT_EXPR:
3272 return TREE_OPERAND (arg, 0);
3276 tree arg1 = TREE_OPERAND (arg, 1);
3277 tree arg2 = TREE_OPERAND (arg, 2);
3278 /* A COND_EXPR may have a throw as one operand, which
3279 then has void type. Just leave void operands
3280 as they are. */
3281 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3282 VOID_TYPE_P (TREE_TYPE (arg1))
3283 ? arg1 : invert_truthvalue (arg1),
3284 VOID_TYPE_P (TREE_TYPE (arg2))
3285 ? arg2 : invert_truthvalue (arg2));
3289 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3290 invert_truthvalue (TREE_OPERAND (arg, 1)));
3292 case NON_LVALUE_EXPR:
3293 return invert_truthvalue (TREE_OPERAND (arg, 0));
3296 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3297 return build1 (TRUTH_NOT_EXPR, type, arg);
3301 return build1 (TREE_CODE (arg), type,
3302 invert_truthvalue (TREE_OPERAND (arg, 0)));
3305 if (!integer_onep (TREE_OPERAND (arg, 1)))
3307 return build2 (EQ_EXPR, type, arg,
3308 build_int_cst (type, 0));
3311 return build1 (TRUTH_NOT_EXPR, type, arg);
3313 case CLEANUP_POINT_EXPR:
3314 return build1 (CLEANUP_POINT_EXPR, type,
3315 invert_truthvalue (TREE_OPERAND (arg, 0)));
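/* Illustrative examples (added, not from the original source): the
   TRUTH_ANDIF/ORIF cases above implement De Morgan's laws on trees,

     !(a && b)  -->  !a || !b
     !(a || b)  -->  !a && !b

   while an integral comparison is simply inverted, !(x < y) --> x >= y.
   For floating point, !(x < y) becomes x UNGE y when NaNs are honored,
   and stays wrapped in a TRUTH_NOT_EXPR under -ftrapping-math.  */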
3324 /* Return a simplified tree node for the truth-negation of ARG. This
3325 never alters ARG itself. We assume that ARG is an operation that
3326 returns a truth value (0 or 1).
3328 FIXME: one would think we would fold the result, but it causes
3329 problems with the dominator optimizer. */
3332 invert_truthvalue (tree arg)
3336 if (TREE_CODE (arg) == ERROR_MARK)
3339 tem = fold_truth_not_expr (arg);
3341 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3346 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3347 operands are another bit-wise operation with a common input. If so,
3348 distribute the bit operations to save an operation and possibly two if
3349 constants are involved. For example, convert
3350 (A | B) & (A | C) into A | (B & C)
3351 Further simplification will occur if B and C are constants.
3353 If this optimization cannot be done, 0 will be returned. */
3356 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3361 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3362 || TREE_CODE (arg0) == code
3363 || (TREE_CODE (arg0) != BIT_AND_EXPR
3364 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3367 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3369 common = TREE_OPERAND (arg0, 0);
3370 left = TREE_OPERAND (arg0, 1);
3371 right = TREE_OPERAND (arg1, 1);
3373 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3375 common = TREE_OPERAND (arg0, 0);
3376 left = TREE_OPERAND (arg0, 1);
3377 right = TREE_OPERAND (arg1, 0);
3379 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3381 common = TREE_OPERAND (arg0, 1);
3382 left = TREE_OPERAND (arg0, 0);
3383 right = TREE_OPERAND (arg1, 1);
3385 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3387 common = TREE_OPERAND (arg0, 1);
3388 left = TREE_OPERAND (arg0, 0);
3389 right = TREE_OPERAND (arg1, 0);
3394 return fold_build2 (TREE_CODE (arg0), type, common,
3395 fold_build2 (code, type, left, right));
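/* A concrete instance (illustrative): (a | 3) & (a | 5) is rewritten
   as a | (3 & 5), which further folds to a | 1; dually,
   (a & 3) | (a & 5) becomes a & (3 | 5) = a & 7.  One bit operation
   is saved, two when the inner operation folds to a constant.  */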
3398 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3399 with code CODE. This optimization is unsafe. */
3401 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3403 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3404 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3406 /* (A / C) +- (B / C) -> (A +- B) / C. */
3408 && operand_equal_p (TREE_OPERAND (arg0, 1),
3409 TREE_OPERAND (arg1, 1), 0))
3410 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3411 fold_build2 (code, type,
3412 TREE_OPERAND (arg0, 0),
3413 TREE_OPERAND (arg1, 0)),
3414 TREE_OPERAND (arg0, 1));
3416 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3417 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3418 TREE_OPERAND (arg1, 0), 0)
3419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3420 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3422 REAL_VALUE_TYPE r0, r1;
3423 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3424 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3426 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3428 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3429 real_arithmetic (&r0, code, &r0, &r1);
3430 return fold_build2 (MULT_EXPR, type,
3431 TREE_OPERAND (arg0, 0),
3432 build_real (type, r0));
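/* Illustrative example (the caller is expected to guard this with the
   unsafe-math flags, per the comment above): with C1 == 2.0 and
   C2 == 4.0,

     x / 2.0 - x / 4.0

   becomes x * 0.25, the reciprocals 0.5 and 0.25 being combined at
   compile time.  The rewrite can round differently from the original,
   which is why it is flagged unsafe.  */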
3438 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3439 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3442 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3449 tree size = TYPE_SIZE (TREE_TYPE (inner));
3450 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3451 || POINTER_TYPE_P (TREE_TYPE (inner)))
3452 && host_integerp (size, 0)
3453 && tree_low_cst (size, 0) == bitsize)
3454 return fold_convert (type, inner);
3457 result = build3 (BIT_FIELD_REF, type, inner,
3458 size_int (bitsize), bitsize_int (bitpos));
3460 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3465 /* Optimize a bit-field compare.
3467 There are two cases: the first is a compare against a constant and the
3468 second is a comparison of two items where the fields are at the same
3469 bit position relative to the start of a chunk (byte, halfword, word)
3470 large enough to contain it. In these cases we can avoid the shift
3471 implicit in bitfield extractions.
3473 For constants, we emit a compare of the shifted constant with the
3474 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3475 compared. For two fields at the same position, we do the ANDs with the
3476 similar mask and compare the result of the ANDs.
3478 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3479 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3480 are the left and right operands of the comparison, respectively.
3482 If the optimization described above can be done, we return the resulting
3483 tree. Otherwise we return zero. */
3486 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3489 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3490 tree type = TREE_TYPE (lhs);
3491 tree signed_type, unsigned_type;
3492 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3493 enum machine_mode lmode, rmode, nmode;
3494 int lunsignedp, runsignedp;
3495 int lvolatilep = 0, rvolatilep = 0;
3496 tree linner, rinner = NULL_TREE;
3500 /* Get all the information about the extractions being done. If the bit size
3501 is the same as the size of the underlying object, we aren't doing an
3502 extraction at all and so can do nothing. We also don't want to
3503 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3504 then will no longer be able to replace it. */
3505 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3506 &lunsignedp, &lvolatilep, false);
3507 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3508 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3513 /* If this is not a constant, we can only do something if bit positions,
3514 sizes, and signedness are the same. */
3515 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3516 &runsignedp, &rvolatilep, false);
3518 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3519 || lunsignedp != runsignedp || offset != 0
3520 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3524 /* See if we can find a mode to refer to this field. We should be able to,
3525 but fail if we can't. */
3526 nmode = get_best_mode (lbitsize, lbitpos,
3527 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3528 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3529 TYPE_ALIGN (TREE_TYPE (rinner))),
3530 word_mode, lvolatilep || rvolatilep);
3531 if (nmode == VOIDmode)
3534 /* Set signed and unsigned types of the precision of this mode for the
3535 shifts below. */
3536 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3537 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3539 /* Compute the bit position and size for the new reference and our offset
3540 within it. If the new reference is the same size as the original, we
3541 won't optimize anything, so return zero. */
3542 nbitsize = GET_MODE_BITSIZE (nmode);
3543 nbitpos = lbitpos & ~ (nbitsize - 1);
3545 if (nbitsize == lbitsize)
3548 if (BYTES_BIG_ENDIAN)
3549 lbitpos = nbitsize - lbitsize - lbitpos;
3551 /* Make the mask to be used against the extracted field. */
3552 mask = build_int_cst (unsigned_type, -1);
3553 mask = force_fit_type (mask, 0, false, false);
3554 mask = fold_convert (unsigned_type, mask);
3555 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3556 mask = const_binop (RSHIFT_EXPR, mask,
3557 size_int (nbitsize - lbitsize - lbitpos), 0);
3560 /* If not comparing with constant, just rework the comparison
3561 and return. */
3562 return build2 (code, compare_type,
3563 build2 (BIT_AND_EXPR, unsigned_type,
3564 make_bit_field_ref (linner, unsigned_type,
3565 nbitsize, nbitpos, 1),
3567 build2 (BIT_AND_EXPR, unsigned_type,
3568 make_bit_field_ref (rinner, unsigned_type,
3569 nbitsize, nbitpos, 1),
3572 /* Otherwise, we are handling the constant case. See if the constant is too
3573 big for the field. Warn and return a tree for 0 (false) if so. We do
3574 this not only for its own sake, but to avoid having to test for this
3575 error case below. If we didn't, we might generate wrong code.
3577 For unsigned fields, the constant shifted right by the field length should
3578 be all zero. For signed fields, the high-order bits should agree with
3579 the sign bit. */
3583 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3584 fold_convert (unsigned_type, rhs),
3585 size_int (lbitsize), 0)))
3587 warning (0, "comparison is always %d due to width of bit-field",
3589 return constant_boolean_node (code == NE_EXPR, compare_type);
3594 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3595 size_int (lbitsize - 1), 0);
3596 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3598 warning (0, "comparison is always %d due to width of bit-field",
3600 return constant_boolean_node (code == NE_EXPR, compare_type);
3604 /* Single-bit compares should always be against zero. */
3605 if (lbitsize == 1 && ! integer_zerop (rhs))
3607 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3608 rhs = build_int_cst (type, 0);
3611 /* Make a new bitfield reference, shift the constant over the
3612 appropriate number of bits and mask it with the computed mask
3613 (in case this was a signed field). If we changed it, make a new one. */
3614 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3617 TREE_SIDE_EFFECTS (lhs) = 1;
3618 TREE_THIS_VOLATILE (lhs) = 1;
3621 rhs = const_binop (BIT_AND_EXPR,
3622 const_binop (LSHIFT_EXPR,
3623 fold_convert (unsigned_type, rhs),
3624 size_int (lbitpos), 0),
3627 return build2 (code, compare_type,
3628 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
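/* Illustrative sketch of the constant case (bit positions and masks
   are target dependent; the numbers below are only an example):

     struct S { unsigned a : 3; unsigned b : 5; } s;

   a test like s.b == 7 is folded to roughly

     (containing_word & MASK) == (7 << BITPOS)

   one load, one AND and one compare, avoiding the shift-and-mask
   extraction of the bit-field that a plain read of s.b implies.  */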
3632 /* Subroutine for fold_truthop: decode a field reference.
3634 If EXP is a comparison reference, we return the innermost reference.
3636 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3637 set to the starting bit number.
3639 If the innermost field can be completely contained in a mode-sized
3640 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3642 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3643 otherwise it is not changed.
3645 *PUNSIGNEDP is set to the signedness of the field.
3647 *PMASK is set to the mask used. This is either contained in a
3648 BIT_AND_EXPR or derived from the width of the field.
3650 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3652 Return 0 if this is not a component reference or is one that we can't
3653 do anything with. */
3656 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3657 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3658 int *punsignedp, int *pvolatilep,
3659 tree *pmask, tree *pand_mask)
3661 tree outer_type = 0;
3663 tree mask, inner, offset;
3665 unsigned int precision;
3667 /* All the optimizations using this function assume integer fields.
3668 There are problems with FP fields since the type_for_size call
3669 below can fail for, e.g., XFmode. */
3670 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3673 /* We are interested in the bare arrangement of bits, so strip everything
3674 that doesn't affect the machine mode. However, record the type of the
3675 outermost expression if it may matter below. */
3676 if (TREE_CODE (exp) == NOP_EXPR
3677 || TREE_CODE (exp) == CONVERT_EXPR
3678 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3679 outer_type = TREE_TYPE (exp);
3682 if (TREE_CODE (exp) == BIT_AND_EXPR)
3684 and_mask = TREE_OPERAND (exp, 1);
3685 exp = TREE_OPERAND (exp, 0);
3686 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3687 if (TREE_CODE (and_mask) != INTEGER_CST)
3691 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3692 punsignedp, pvolatilep, false);
3693 if ((inner == exp && and_mask == 0)
3694 || *pbitsize < 0 || offset != 0
3695 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3698 /* If the number of bits in the reference is the same as the bitsize of
3699 the outer type, then the outer type gives the signedness. Otherwise
3700 (in case of a small bitfield) the signedness is unchanged. */
3701 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3702 *punsignedp = TYPE_UNSIGNED (outer_type);
3704 /* Compute the mask to access the bitfield. */
3705 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3706 precision = TYPE_PRECISION (unsigned_type);
3708 mask = build_int_cst (unsigned_type, -1);
3709 mask = force_fit_type (mask, 0, false, false);
3711 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3712 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3714 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3716 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3717 fold_convert (unsigned_type, and_mask), mask);
3720 *pand_mask = and_mask;
3724 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3725 bits. */
3728 all_ones_mask_p (tree mask, int size)
3730 tree type = TREE_TYPE (mask);
3731 unsigned int precision = TYPE_PRECISION (type);
3734 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3735 tmask = force_fit_type (tmask, 0, false, false);
3738 tree_int_cst_equal (mask,
3739 const_binop (RSHIFT_EXPR,
3740 const_binop (LSHIFT_EXPR, tmask,
3741 size_int (precision - size),
3743 size_int (precision - size), 0));
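/* For instance (illustrative): with SIZE == 4, the constant 0xf
   satisfies all_ones_mask_p, whereas 0x7 (too short) and 0x17 (a bit
   outside the low-order four) do not.  */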
3746 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3747 represents the sign bit of EXP's type. If EXP represents a sign
3748 or zero extension, also test VAL against the unextended type.
3749 The return value is the (sub)expression whose sign bit is VAL,
3750 or NULL_TREE otherwise. */
3753 sign_bit_p (tree exp, tree val)
3755 unsigned HOST_WIDE_INT mask_lo, lo;
3756 HOST_WIDE_INT mask_hi, hi;
3760 /* Tree EXP must have an integral type. */
3761 t = TREE_TYPE (exp);
3762 if (! INTEGRAL_TYPE_P (t))
3765 /* Tree VAL must be an integer constant. */
3766 if (TREE_CODE (val) != INTEGER_CST
3767 || TREE_CONSTANT_OVERFLOW (val))
3770 width = TYPE_PRECISION (t);
3771 if (width > HOST_BITS_PER_WIDE_INT)
3773 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3776 mask_hi = ((unsigned HOST_WIDE_INT) -1
3777 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3783 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3786 mask_lo = ((unsigned HOST_WIDE_INT) -1
3787 >> (HOST_BITS_PER_WIDE_INT - width));
3790 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3791 treat VAL as if it were unsigned. */
3792 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3793 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3796 /* Handle extension from a narrower type. */
3797 if (TREE_CODE (exp) == NOP_EXPR
3798 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3799 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3804 /* Subroutine for fold_truthop: determine if an operand is simple enough
3805 to be evaluated unconditionally. */
3808 simple_operand_p (tree exp)
3810 /* Strip any conversions that don't change the machine mode. */
3813 return (CONSTANT_CLASS_P (exp)
3814 || TREE_CODE (exp) == SSA_NAME
3816 && ! TREE_ADDRESSABLE (exp)
3817 && ! TREE_THIS_VOLATILE (exp)
3818 && ! DECL_NONLOCAL (exp)
3819 /* Don't regard global variables as simple. They may be
3820 allocated in ways unknown to the compiler (shared memory,
3821 #pragma weak, etc). */
3822 && ! TREE_PUBLIC (exp)
3823 && ! DECL_EXTERNAL (exp)
3824 /* Loading a static variable is unduly expensive, but global
3825 registers aren't expensive. */
3826 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3829 /* The following functions are subroutines to fold_range_test and allow it to
3830 try to change a logical combination of comparisons into a range test.
3832 For example, both
3833 X == 2 || X == 3 || X == 4 || X == 5
3834 and
3835 X >= 2 && X <= 5
3836 are converted to
3837 (unsigned) (X - 2) <= 3
3839 We describe each set of comparisons as being either inside or outside
3840 a range, using a variable named like IN_P, and then describe the
3841 range with a lower and upper bound. If one of the bounds is omitted,
3842 it represents either the highest or lowest value of the type.
3844 In the comments below, we represent a range by two numbers in brackets
3845 preceded by a "+" to designate being inside that range, or a "-" to
3846 designate being outside that range, so the condition can be inverted by
3847 flipping the prefix. An omitted bound is represented by a "-". For
3848 example, "- [-, 10]" means being outside the range starting at the lowest
3849 possible value and ending at 10, in other words, being greater than 10.
3850 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3851 always false.
3853 We set up things so that the missing bounds are handled in a consistent
3854 manner so neither a missing bound nor "true" and "false" need to be
3855 handled using a special case. */
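/* An added sketch of the payoff (not part of the original source);
   both C functions below compute the same predicate:

     static int in_range_naive (int x)
     {
       return x == 2 || x == 3 || x == 4 || x == 5;
     }

     static int in_range_folded (int x)
     {
       return (unsigned int) (x - 2) <= 3U;
     }

   Subtracting the low bound biases the range to start at zero, and the
   unsigned comparison rejects values below 2 because they wrap around
   to very large numbers.  */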
3857 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3858 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3859 and UPPER1_P are nonzero if the respective argument is an upper bound
3860 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3861 must be specified for a comparison. ARG1 will be converted to ARG0's
3862 type if both are specified. */
3865 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3866 tree arg1, int upper1_p)
3872 /* If neither arg represents infinity, do the normal operation.
3873 Else, if not a comparison, return infinity. Else handle the special
3874 comparison rules. Note that most of the cases below won't occur, but
3875 are handled for consistency. */
3877 if (arg0 != 0 && arg1 != 0)
3879 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3880 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3882 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3885 if (TREE_CODE_CLASS (code) != tcc_comparison)
3888 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3889 for neither. In real mathematics, we cannot assume open-ended ranges
3890 are the same. But this is computer arithmetic, where numbers are finite.
3891 We can therefore substitute a value Z for any missing bound, with
3892 Z greater than any representable number. This permits
3893 us to treat unbounded ranges as equal. */
3894 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3895 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3899 result = sgn0 == sgn1;
3902 result = sgn0 != sgn1;
3905 result = sgn0 < sgn1;
3908 result = sgn0 <= sgn1;
3911 result = sgn0 > sgn1;
3914 result = sgn0 >= sgn1;
3920 return constant_boolean_node (result, type);
3923 /* Given EXP, a logical expression, set the range it is testing into
3924 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3925 actually being tested. *PLOW and *PHIGH will be made of the same
3926 type as the returned expression. If EXP is not a comparison, we
3927 will most likely not be returning a useful value and range. Set
3928 *STRICT_OVERFLOW_P to true if the return value is only valid
3929 because signed overflow is undefined; otherwise, do not change
3930 *STRICT_OVERFLOW_P. */
3933 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3934 bool *strict_overflow_p)
3936 enum tree_code code;
3937 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3938 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3940 tree low, high, n_low, n_high;
3942 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3943 and see if we can refine the range. Some of the cases below may not
3944 happen, but it doesn't seem worth worrying about this. We "continue"
3945 the outer loop when we've changed something; otherwise we "break"
3946 the switch, which will "break" the while. */
3949 low = high = build_int_cst (TREE_TYPE (exp), 0);
3953 code = TREE_CODE (exp);
3954 exp_type = TREE_TYPE (exp);
3956 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3958 if (TREE_CODE_LENGTH (code) > 0)
3959 arg0 = TREE_OPERAND (exp, 0);
3960 if (TREE_CODE_CLASS (code) == tcc_comparison
3961 || TREE_CODE_CLASS (code) == tcc_unary
3962 || TREE_CODE_CLASS (code) == tcc_binary)
3963 arg0_type = TREE_TYPE (arg0);
3964 if (TREE_CODE_CLASS (code) == tcc_binary
3965 || TREE_CODE_CLASS (code) == tcc_comparison
3966 || (TREE_CODE_CLASS (code) == tcc_expression
3967 && TREE_CODE_LENGTH (code) > 1))
3968 arg1 = TREE_OPERAND (exp, 1);
3973 case TRUTH_NOT_EXPR:
3974 in_p = ! in_p, exp = arg0;
3977 case EQ_EXPR: case NE_EXPR:
3978 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3979 /* We can only do something if the range is testing for zero
3980 and if the second operand is an integer constant. Note that
3981 saying something is "in" the range we make is done by
3982 complementing IN_P, since IN_P is initially set for the case of
3983 being not equal to zero; "out" means leaving it alone. */
3984 if (low == 0 || high == 0
3985 || ! integer_zerop (low) || ! integer_zerop (high)
3986 || TREE_CODE (arg1) != INTEGER_CST)
3991 case NE_EXPR: /* - [c, c] */
3994 case EQ_EXPR: /* + [c, c] */
3995 in_p = ! in_p, low = high = arg1;
3997 case GT_EXPR: /* - [-, c] */
3998 low = 0, high = arg1;
4000 case GE_EXPR: /* + [c, -] */
4001 in_p = ! in_p, low = arg1, high = 0;
4003 case LT_EXPR: /* - [c, -] */
4004 low = arg1, high = 0;
4006 case LE_EXPR: /* + [-, c] */
4007 in_p = ! in_p, low = 0, high = arg1;
4013 /* If this is an unsigned comparison, we also know that EXP is
4014 greater than or equal to zero. We base the range tests we make
4015 on that fact, so we record it here so we can parse existing
4016 range tests. We test arg0_type since often the return type
4017 of, e.g. EQ_EXPR, is boolean. */
4018 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4020 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4022 build_int_cst (arg0_type, 0),
4026 in_p = n_in_p, low = n_low, high = n_high;
4028 /* If the high bound is missing, but we have a nonzero low
4029 bound, reverse the range so it goes from zero to the low bound
4030 minus one. */
4031 if (high == 0 && low && ! integer_zerop (low))
4034 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4035 integer_one_node, 0);
4036 low = build_int_cst (arg0_type, 0);
4044 /* (-x) IN [a,b] -> x in [-b, -a] */
4045 n_low = range_binop (MINUS_EXPR, exp_type,
4046 build_int_cst (exp_type, 0),
4048 n_high = range_binop (MINUS_EXPR, exp_type,
4049 build_int_cst (exp_type, 0),
4051 low = n_low, high = n_high;
4057 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4058 build_int_cst (exp_type, 1));
4061 case PLUS_EXPR: case MINUS_EXPR:
4062 if (TREE_CODE (arg1) != INTEGER_CST)
4065 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4066 move a constant to the other side. */
4067 if (!TYPE_UNSIGNED (arg0_type)
4068 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4071 /* If EXP is signed, any overflow in the computation is undefined,
4072 so we don't worry about it so long as our computations on
4073 the bounds don't overflow. For unsigned, overflow is defined
4074 and this is exactly the right thing. */
4075 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4076 arg0_type, low, 0, arg1, 0);
4077 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4078 arg0_type, high, 1, arg1, 0);
4079 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4080 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4083 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4084 *strict_overflow_p = true;
4086 /* Check for an unsigned range which has wrapped around the maximum
4087 value thus making n_high < n_low, and normalize it. */
4088 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4090 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4091 integer_one_node, 0);
4092 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4093 integer_one_node, 0);
4095 /* If the range is of the form +/- [ x+1, x ], we won't
4096 be able to normalize it. But then, it represents the
4097 whole range or the empty set, so make it
4098 +/- [ -, - ]. */
4099 if (tree_int_cst_equal (n_low, low)
4100 && tree_int_cst_equal (n_high, high))
4106 low = n_low, high = n_high;
4111 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4112 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4115 if (! INTEGRAL_TYPE_P (arg0_type)
4116 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4117 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4120 n_low = low, n_high = high;
4123 n_low = fold_convert (arg0_type, n_low);
4126 n_high = fold_convert (arg0_type, n_high);
4129 /* If we're converting arg0 from an unsigned type to exp,
4130 a signed type, we will be doing the comparison as unsigned.
4131 The tests above have already verified that LOW and HIGH
4132 are both positive.
4134 So we have to ensure that we will handle large unsigned
4135 values the same way that the current signed bounds treat
4136 negative values. */
4138 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4141 tree equiv_type = lang_hooks.types.type_for_mode
4142 (TYPE_MODE (arg0_type), 1);
4144 /* A range without an upper bound is, naturally, unbounded.
4145 Since convert would have cropped a very large value, use
4146 the max value for the destination type. */
4148 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4149 : TYPE_MAX_VALUE (arg0_type);
4151 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4152 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4153 fold_convert (arg0_type,
4155 fold_convert (arg0_type,
4158 /* If the low bound is specified, "and" the range with the
4159 range for which the original unsigned value will be
4160 positive. */
4163 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4164 1, n_low, n_high, 1,
4165 fold_convert (arg0_type,
4170 in_p = (n_in_p == in_p);
4174 /* Otherwise, "or" the range with the range of the input
4175 that will be interpreted as negative. */
4176 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4177 0, n_low, n_high, 1,
4178 fold_convert (arg0_type,
4183 in_p = (in_p != n_in_p);
4188 low = n_low, high = n_high;
4198 /* If EXP is a constant, we can evaluate whether this is true or false. */
4199 if (TREE_CODE (exp) == INTEGER_CST)
4201 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4203 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4209 *pin_p = in_p, *plow = low, *phigh = high;
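/* A worked example (illustrative): for EXP = "x + 10 > 15" with int x,
   the GT_EXPR case first records "- [-, 15]" for x + 10; the
   PLUS_EXPR case then moves the constant across the bounds, leaving
   "- [-, 5]" for x itself, so the test is equivalent to x > 5.
   Because moving the constant relies on signed overflow being
   undefined, *STRICT_OVERFLOW_P is set to true along the way.  */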
4213 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4214 type, TYPE, return an expression to test if EXP is in (or out of, depending
4215 on IN_P) the range. Return 0 if the test couldn't be created. */
4218 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4220 tree etype = TREE_TYPE (exp);
4223 #ifdef HAVE_canonicalize_funcptr_for_compare
4224 /* Disable this optimization for function pointer expressions
4225 on targets that require function pointer canonicalization. */
4226 if (HAVE_canonicalize_funcptr_for_compare
4227 && TREE_CODE (etype) == POINTER_TYPE
4228 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4234 value = build_range_check (type, exp, 1, low, high);
4236 return invert_truthvalue (value);
4241 if (low == 0 && high == 0)
4242 return build_int_cst (type, 1);
4245 return fold_build2 (LE_EXPR, type, exp,
4246 fold_convert (etype, high));
4249 return fold_build2 (GE_EXPR, type, exp,
4250 fold_convert (etype, low));
4252 if (operand_equal_p (low, high, 0))
4253 return fold_build2 (EQ_EXPR, type, exp,
4254 fold_convert (etype, low));
4256 if (integer_zerop (low))
4258 if (! TYPE_UNSIGNED (etype))
4260 etype = lang_hooks.types.unsigned_type (etype);
4261 high = fold_convert (etype, high);
4262 exp = fold_convert (etype, exp);
4264 return build_range_check (type, exp, 1, 0, high);
4267 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4268 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4270 unsigned HOST_WIDE_INT lo;
4274 prec = TYPE_PRECISION (etype);
4275 if (prec <= HOST_BITS_PER_WIDE_INT)
4278 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4282 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4283 lo = (unsigned HOST_WIDE_INT) -1;
4286 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4288 if (TYPE_UNSIGNED (etype))
4290 etype = lang_hooks.types.signed_type (etype);
4291 exp = fold_convert (etype, exp);
4293 return fold_build2 (GT_EXPR, type, exp,
4294 build_int_cst (etype, 0));
4298 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4299 This requires wrap-around arithmetic for the type of the expression. */
4300 switch (TREE_CODE (etype))
4303 /* There is no requirement that LOW be within the range of ETYPE
4304 if the latter is a subtype. It must, however, be within the base
4305 type of ETYPE. So be sure we do the subtraction in that type. */
4306 if (TREE_TYPE (etype))
4307 etype = TREE_TYPE (etype);
4312 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4313 TYPE_UNSIGNED (etype));
4320 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4321 if (TREE_CODE (etype) == INTEGER_TYPE
4322 && !TYPE_OVERFLOW_WRAPS (etype))
4324 tree utype, minv, maxv;
4326 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4327 for the type in question, as we rely on this here. */
4328 utype = lang_hooks.types.unsigned_type (etype);
4329 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4330 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4331 integer_one_node, 1);
4332 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4334 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4341 high = fold_convert (etype, high);
4342 low = fold_convert (etype, low);
4343 exp = fold_convert (etype, exp);
4345 value = const_binop (MINUS_EXPR, high, low, 0);
4347 if (value != 0 && !TREE_OVERFLOW (value))
4348 return build_range_check (type,
4349 fold_build2 (MINUS_EXPR, etype, exp, low),
4350 1, build_int_cst (etype, 0), value);
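/* Usage sketch (illustrative): asking for a test of x in [2, 5],

     build_range_check (boolean_type_node, x, 1, low, high)

   with LOW and HIGH the INTEGER_CSTs 2 and 5, reaches the wrap-around
   case: VALUE = HIGH - LOW = 3, the test is recast as x - 2 in [0, 3],
   and the recursive call with a zero low bound switches to the
   unsigned type, finally yielding (unsigned) (x - 2) <= 3.  */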
4355 /* Return the predecessor of VAL in its type, handling the infinite case. */
4358 range_predecessor (tree val)
4360 tree type = TREE_TYPE (val);
4362 if (INTEGRAL_TYPE_P (type)
4363 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4366 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4369 /* Return the successor of VAL in its type, handling the infinite case. */
4372 range_successor (tree val)
4374 tree type = TREE_TYPE (val);
4376 if (INTEGRAL_TYPE_P (type)
4377 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4380 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4383 /* Given two ranges, see if we can merge them into one. Return 1 if we
4384 can, 0 if we can't. Set the output range into the specified parameters. */
4387 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4388 tree high0, int in1_p, tree low1, tree high1)
4396 int lowequal = ((low0 == 0 && low1 == 0)
4397 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4398 low0, 0, low1, 0)));
4399 int highequal = ((high0 == 0 && high1 == 0)
4400 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4401 high0, 1, high1, 1)));
4403 /* Make range 0 be the range that starts first, or ends last if they
4404 start at the same value. Swap them if it isn't. */
4405 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4408 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4409 high1, 1, high0, 1))))
4411 temp = in0_p, in0_p = in1_p, in1_p = temp;
4412 tem = low0, low0 = low1, low1 = tem;
4413 tem = high0, high0 = high1, high1 = tem;
4416 /* Now flag two cases, whether the ranges are disjoint or whether the
4417 second range is totally subsumed in the first. Note that the tests
4418 below are simplified by the ones above. */
4419 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4420 high0, 1, low1, 0));
4421 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4422 high1, 1, high0, 1));
4424 /* We now have four cases, depending on whether we are including or
4425 excluding the two ranges. */
4428 /* If they don't overlap, the result is false. If the second range
4429 is a subset it is the result. Otherwise, the range is from the start
4430 of the second to the end of the first. */
4432 in_p = 0, low = high = 0;
4434 in_p = 1, low = low1, high = high1;
4436 in_p = 1, low = low1, high = high0;
4439 else if (in0_p && ! in1_p)
4441 /* If they don't overlap, the result is the first range. If they are
4442 equal, the result is false. If the second range is a subset of the
4443 first, and the ranges begin at the same place, we go from just after
4444 the end of the second range to the end of the first. If the second
4445 range is not a subset of the first, or if it is a subset and both
4446 ranges end at the same place, the range starts at the start of the
4447 first range and ends just before the second range.
4448 Otherwise, we can't describe this as a single range. */
4450 in_p = 1, low = low0, high = high0;
4451 else if (lowequal && highequal)
4452 in_p = 0, low = high = 0;
4453 else if (subset && lowequal)
4455 low = range_successor (high1);
4460 /* We are in the weird situation where high0 > high1 but
4461 high1 has no successor. Punt. */
4465 else if (! subset || highequal)
4468 high = range_predecessor (low1);
4472 /* low0 < low1 but low1 has no predecessor. Punt. */
4480 else if (! in0_p && in1_p)
4482 /* If they don't overlap, the result is the second range. If the second
4483 is a subset of the first, the result is false. Otherwise,
4484 the range starts just after the first range and ends at the
4485 end of the second. */
4487 in_p = 1, low = low1, high = high1;
4488 else if (subset || highequal)
4489 in_p = 0, low = high = 0;
4492 low = range_successor (high0);
4497 /* high1 > high0 but high0 has no successor. Punt. */
4505 /* The case where we are excluding both ranges. Here the complex case
4506 is if they don't overlap. In that case, the only time we have a
4507 range is if they are adjacent. If the second is a subset of the
4508 first, the result is the first. Otherwise, the range to exclude
starts at the beginning of the first range and ends at the end of the
second.  */
4513 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4514 range_successor (high0),
4516 in_p = 0, low = low0, high = high1;
4519 /* Canonicalize - [min, x] into - [-, x]. */
4520 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4521 switch (TREE_CODE (TREE_TYPE (low0)))
4524 if (TYPE_PRECISION (TREE_TYPE (low0))
4525 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4529 if (tree_int_cst_equal (low0,
4530 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4534 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4535 && integer_zerop (low0))
4542 /* Canonicalize - [x, max] into - [x, -]. */
4543 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4544 switch (TREE_CODE (TREE_TYPE (high1)))
4547 if (TYPE_PRECISION (TREE_TYPE (high1))
4548 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4552 if (tree_int_cst_equal (high1,
4553 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4557 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4558 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4560 integer_one_node, 1)))
/* The ranges might also be adjacent between the maximum and
4568 minimum values of the given type. For
4569 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4570 return + [x + 1, y - 1]. */
4571 if (low0 == 0 && high1 == 0)
4573 low = range_successor (high0);
4574 high = range_predecessor (low1);
4575 if (low == 0 || high == 0)
4585 in_p = 0, low = low0, high = high0;
4587 in_p = 0, low = low0, high = high1;
4590 *pin_p = in_p, *plow = low, *phigh = high;
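/* For example, merging the excluded ranges of "c != 0 && c != 1",
   - [0, 0] and - [1, 1], notices that they are adjacent (the successor
   of 0 is 1) and yields the single excluded range - [0, 1].  */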
4595 /* Subroutine of fold, looking inside expressions of the form
4596 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
of the COND_EXPR.  This function is also used to optimize
4598 A op B ? C : A, by reversing the comparison first.
4600 Return a folded expression whose code is not a COND_EXPR
4601 anymore, or NULL_TREE if no folding opportunity is found. */
4604 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4606 enum tree_code comp_code = TREE_CODE (arg0);
4607 tree arg00 = TREE_OPERAND (arg0, 0);
4608 tree arg01 = TREE_OPERAND (arg0, 1);
4609 tree arg1_type = TREE_TYPE (arg1);
/* If we have A op 0 ? A : -A, consider applying the following
   transformations:
4618 A == 0? A : -A same as -A
4619 A != 0? A : -A same as A
4620 A >= 0? A : -A same as abs (A)
4621 A > 0? A : -A same as abs (A)
4622 A <= 0? A : -A same as -abs (A)
4623 A < 0? A : -A same as -abs (A)
4625 None of these transformations work for modes with signed
4626 zeros. If A is +/-0, the first two transformations will
4627 change the sign of the result (from +0 to -0, or vice
4628 versa). The last four will fix the sign of the result,
4629 even though the original expressions could be positive or
4630 negative, depending on the sign of A.
4632 Note that all these transformations are correct if A is
4633 NaN, since the two alternatives (A and -A) are also NaNs. */
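/* For instance, if A is -0.0, "A == 0 ? A : -A" evaluates to -0.0,
   because -0.0 == 0.0 is true, while the replacement -A yields +0.0;
   hence the fold is restricted to modes without signed zeros.  */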
4634 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4635 ? real_zerop (arg01)
4636 : integer_zerop (arg01))
4637 && ((TREE_CODE (arg2) == NEGATE_EXPR
4638 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4639 /* In the case that A is of the form X-Y, '-A' (arg2) may
4640 have already been folded to Y-X, check for that. */
4641 || (TREE_CODE (arg1) == MINUS_EXPR
4642 && TREE_CODE (arg2) == MINUS_EXPR
4643 && operand_equal_p (TREE_OPERAND (arg1, 0),
4644 TREE_OPERAND (arg2, 1), 0)
4645 && operand_equal_p (TREE_OPERAND (arg1, 1),
4646 TREE_OPERAND (arg2, 0), 0))))
4651 tem = fold_convert (arg1_type, arg1);
4652 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4655 return pedantic_non_lvalue (fold_convert (type, arg1));
4658 if (flag_trapping_math)
4663 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4664 arg1 = fold_convert (lang_hooks.types.signed_type
4665 (TREE_TYPE (arg1)), arg1);
4666 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4667 return pedantic_non_lvalue (fold_convert (type, tem));
4670 if (flag_trapping_math)
4674 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4675 arg1 = fold_convert (lang_hooks.types.signed_type
4676 (TREE_TYPE (arg1)), arg1);
4677 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4678 return negate_expr (fold_convert (type, tem));
4680 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4684 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4685 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4686 both transformations are correct when A is NaN: A != 0
4687 is then true, and A == 0 is false. */
4689 if (integer_zerop (arg01) && integer_zerop (arg2))
4691 if (comp_code == NE_EXPR)
4692 return pedantic_non_lvalue (fold_convert (type, arg1));
4693 else if (comp_code == EQ_EXPR)
4694 return build_int_cst (type, 0);
4697 /* Try some transformations of A op B ? A : B.
4699 A == B? A : B same as B
4700 A != B? A : B same as A
4701 A >= B? A : B same as max (A, B)
4702 A > B? A : B same as max (B, A)
4703 A <= B? A : B same as min (A, B)
4704 A < B? A : B same as min (B, A)
4706 As above, these transformations don't work in the presence
4707 of signed zeros. For example, if A and B are zeros of
4708 opposite sign, the first two transformations will change
4709 the sign of the result. In the last four, the original
4710 expressions give different results for (A=+0, B=-0) and
4711 (A=-0, B=+0), but the transformed expressions do not.
4713 The first two transformations are correct if either A or B
4714 is a NaN. In the first transformation, the condition will
4715 be false, and B will indeed be chosen. In the case of the
4716 second transformation, the condition A != B will be true,
4717 and A will be chosen.
4719 The conversions to max() and min() are not correct if B is
4720 a number and A is not. The conditions in the original
4721 expressions will be false, so all four give B. The min()
4722 and max() versions would give a NaN instead. */
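/* Concretely, when NaNs need not be honored, "x <= y ? x : y"
   becomes MIN_EXPR <x, y> and "x > y ? x : y" becomes
   MAX_EXPR <y, x>, following the table above.  */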
4723 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4724 /* Avoid these transformations if the COND_EXPR may be used
4725 as an lvalue in the C++ front-end. PR c++/19199. */
4727 || (strcmp (lang_hooks.name, "GNU C++") != 0
4728 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4729 || ! maybe_lvalue_p (arg1)
4730 || ! maybe_lvalue_p (arg2)))
4732 tree comp_op0 = arg00;
4733 tree comp_op1 = arg01;
4734 tree comp_type = TREE_TYPE (comp_op0);
4736 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4737 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4747 return pedantic_non_lvalue (fold_convert (type, arg2));
4749 return pedantic_non_lvalue (fold_convert (type, arg1));
4754 /* In C++ a ?: expression can be an lvalue, so put the
4755 operand which will be used if they are equal first
4756 so that we can convert this back to the
4757 corresponding COND_EXPR. */
4758 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4760 comp_op0 = fold_convert (comp_type, comp_op0);
4761 comp_op1 = fold_convert (comp_type, comp_op1);
4762 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4763 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4764 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4765 return pedantic_non_lvalue (fold_convert (type, tem));
4772 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4774 comp_op0 = fold_convert (comp_type, comp_op0);
4775 comp_op1 = fold_convert (comp_type, comp_op1);
4776 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4777 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4778 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4779 return pedantic_non_lvalue (fold_convert (type, tem));
4783 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4784 return pedantic_non_lvalue (fold_convert (type, arg2));
4787 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4788 return pedantic_non_lvalue (fold_convert (type, arg1));
4791 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4796 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4797 we might still be able to simplify this. For example,
4798 if C1 is one less or one more than C2, this might have started
4799 out as a MIN or MAX and been transformed by this function.
4800 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
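/* For example, "A < 3 ? A : 2" is min (A, 2) and "A > 2 ? A : 3"
   is max (A, 3); the cases below recover such forms.  */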
4802 if (INTEGRAL_TYPE_P (type)
4803 && TREE_CODE (arg01) == INTEGER_CST
4804 && TREE_CODE (arg2) == INTEGER_CST)
4808 /* We can replace A with C1 in this case. */
4809 arg1 = fold_convert (type, arg01);
4810 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4813 /* If C1 is C2 + 1, this is min(A, C2). */
4814 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4816 && operand_equal_p (arg01,
4817 const_binop (PLUS_EXPR, arg2,
4818 integer_one_node, 0),
4820 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4825 /* If C1 is C2 - 1, this is min(A, C2). */
4826 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4828 && operand_equal_p (arg01,
4829 const_binop (MINUS_EXPR, arg2,
4830 integer_one_node, 0),
4832 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4837 /* If C1 is C2 - 1, this is max(A, C2). */
4838 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4840 && operand_equal_p (arg01,
4841 const_binop (MINUS_EXPR, arg2,
4842 integer_one_node, 0),
4844 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4849 /* If C1 is C2 + 1, this is max(A, C2). */
4850 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4852 && operand_equal_p (arg01,
4853 const_binop (PLUS_EXPR, arg2,
4854 integer_one_node, 0),
4856 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4870 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4871 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4874 /* EXP is some logical combination of boolean tests. See if we can
4875 merge it into some range test. Return the new tree if so. */
4878 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4880 int or_op = (code == TRUTH_ORIF_EXPR
4881 || code == TRUTH_OR_EXPR);
4882 int in0_p, in1_p, in_p;
4883 tree low0, low1, low, high0, high1, high;
4884 bool strict_overflow_p = false;
4885 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4886 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4888 const char * const warnmsg = G_("assuming signed overflow does not occur "
4889 "when simplifying range test");
4891 /* If this is an OR operation, invert both sides; we will invert
4892 again at the end. */
4894 in0_p = ! in0_p, in1_p = ! in1_p;
4896 /* If both expressions are the same, if we can merge the ranges, and we
4897 can build the range test, return it or it inverted. If one of the
4898 ranges is always true or always false, consider it to be the same
4899 expression as the other. */
4900 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4901 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4903 && 0 != (tem = (build_range_check (type,
4905 : rhs != 0 ? rhs : integer_zero_node,
4908 if (strict_overflow_p)
4909 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4910 return or_op ? invert_truthvalue (tem) : tem;
/* On machines where branches are expensive, if this is a
4914 short-circuited branch and the underlying object on both sides
4915 is the same, make a non-short-circuit operation. */
4916 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4917 && lhs != 0 && rhs != 0
4918 && (code == TRUTH_ANDIF_EXPR
4919 || code == TRUTH_ORIF_EXPR)
4920 && operand_equal_p (lhs, rhs, 0))
4922 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4923 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4924 which cases we can't do this. */
4925 if (simple_operand_p (lhs))
4926 return build2 (code == TRUTH_ANDIF_EXPR
4927 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4930 else if (lang_hooks.decls.global_bindings_p () == 0
4931 && ! CONTAINS_PLACEHOLDER_P (lhs))
4933 tree common = save_expr (lhs);
4935 if (0 != (lhs = build_range_check (type, common,
4936 or_op ? ! in0_p : in0_p,
4938 && (0 != (rhs = build_range_check (type, common,
4939 or_op ? ! in1_p : in1_p,
4942 if (strict_overflow_p)
4943 fold_overflow_warning (warnmsg,
4944 WARN_STRICT_OVERFLOW_COMPARISON);
4945 return build2 (code == TRUTH_ANDIF_EXPR
4946 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
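/* For example, given "c >= '0' && c <= '9'", the two comparisons yield
   the ranges ['0', -] and [-, '9']; merge_ranges combines them into
   ['0', '9'], and build_range_check then emits a single unsigned test
   along the lines of (unsigned char) (c - '0') <= 9.  */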
4955 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4956 bit value. Arrange things so the extra bits will be set to zero if and
only if C is sign-extended to its full width.  If MASK is nonzero,
4958 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4961 unextend (tree c, int p, int unsignedp, tree mask)
4963 tree type = TREE_TYPE (c);
4964 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4967 if (p == modesize || unsignedp)
4970 /* We work by getting just the sign bit into the low-order bit, then
into the high-order bit, then sign-extend.  We then XOR that value
with C.  */
4973 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4974 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4976 /* We must use a signed type in order to get an arithmetic right shift.
4977 However, we must also avoid introducing accidental overflows, so that
4978 a subsequent call to integer_zerop will work. Hence we must
4979 do the type conversion here. At this point, the constant is either
4980 zero or one, and the conversion to a signed type can never overflow.
4981 We could get an overflow if this conversion is done anywhere else. */
4982 if (TYPE_UNSIGNED (type))
4983 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4985 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4986 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4988 temp = const_binop (BIT_AND_EXPR, temp,
4989 fold_convert (TREE_TYPE (c), mask), 0);
4990 /* If necessary, convert the type back to match the type of C. */
4991 if (TYPE_UNSIGNED (type))
4992 temp = fold_convert (type, temp);
4994 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
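/* A worked example, assuming a 32-bit mode and a null MASK:
   unextend (12, 4, 0, NULL) views 12 as the 4-bit pattern 1100, whose
   sign bit is set, and returns 0xfffffffc, i.e. 1100 sign-extended
   to the full width (-4).  */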
4997 /* Find ways of folding logical expressions of LHS and RHS:
4998 Try to merge two comparisons to the same innermost item.
4999 Look for range tests like "ch >= '0' && ch <= '9'".
5000 Look for combinations of simple terms on machines with expensive branches
5001 and evaluate the RHS unconditionally.
5003 For example, if we have p->a == 2 && p->b == 4 and we can make an
5004 object large enough to span both A and B, we can do this with a comparison
against the object ANDed with a mask.
5007 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5008 operations to do this with one comparison.
We check for both normal comparisons and the BIT_AND_EXPRs made by this
function and the one above.
5013 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5014 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
two operands.
5019 We return the simplified tree or 0 if no optimization is possible. */
5022 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5024 /* If this is the "or" of two comparisons, we can do something if
5025 the comparisons are NE_EXPR. If this is the "and", we can do something
5026 if the comparisons are EQ_EXPR. I.e.,
5027 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5029 WANTED_CODE is this operation code. For single bit fields, we can
5030 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5031 comparison for one-bit fields. */
5033 enum tree_code wanted_code;
5034 enum tree_code lcode, rcode;
5035 tree ll_arg, lr_arg, rl_arg, rr_arg;
5036 tree ll_inner, lr_inner, rl_inner, rr_inner;
5037 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5038 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5039 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5040 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5041 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5042 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5043 enum machine_mode lnmode, rnmode;
5044 tree ll_mask, lr_mask, rl_mask, rr_mask;
5045 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5046 tree l_const, r_const;
5047 tree lntype, rntype, result;
5048 int first_bit, end_bit;
5050 tree orig_lhs = lhs, orig_rhs = rhs;
5051 enum tree_code orig_code = code;
5053 /* Start by getting the comparison codes. Fail if anything is volatile.
5054 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5055 it were surrounded with a NE_EXPR. */
5057 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5060 lcode = TREE_CODE (lhs);
5061 rcode = TREE_CODE (rhs);
5063 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5065 lhs = build2 (NE_EXPR, truth_type, lhs,
5066 build_int_cst (TREE_TYPE (lhs), 0));
5070 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5072 rhs = build2 (NE_EXPR, truth_type, rhs,
5073 build_int_cst (TREE_TYPE (rhs), 0));
5077 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5078 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5081 ll_arg = TREE_OPERAND (lhs, 0);
5082 lr_arg = TREE_OPERAND (lhs, 1);
5083 rl_arg = TREE_OPERAND (rhs, 0);
5084 rr_arg = TREE_OPERAND (rhs, 1);
5086 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5087 if (simple_operand_p (ll_arg)
5088 && simple_operand_p (lr_arg))
5091 if (operand_equal_p (ll_arg, rl_arg, 0)
5092 && operand_equal_p (lr_arg, rr_arg, 0))
5094 result = combine_comparisons (code, lcode, rcode,
5095 truth_type, ll_arg, lr_arg);
5099 else if (operand_equal_p (ll_arg, rr_arg, 0)
5100 && operand_equal_p (lr_arg, rl_arg, 0))
5102 result = combine_comparisons (code, lcode,
5103 swap_tree_comparison (rcode),
5104 truth_type, ll_arg, lr_arg);
5110 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5111 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5113 /* If the RHS can be evaluated unconditionally and its operands are
5114 simple, it wins to evaluate the RHS unconditionally on machines
5115 with expensive branches. In this case, this isn't a comparison
5116 that can be merged. Avoid doing this if the RHS is a floating-point
5117 comparison since those can trap. */
5119 if (BRANCH_COST >= 2
5120 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5121 && simple_operand_p (rl_arg)
5122 && simple_operand_p (rr_arg))
5124 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5125 if (code == TRUTH_OR_EXPR
5126 && lcode == NE_EXPR && integer_zerop (lr_arg)
5127 && rcode == NE_EXPR && integer_zerop (rr_arg)
5128 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5129 return build2 (NE_EXPR, truth_type,
5130 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5132 build_int_cst (TREE_TYPE (ll_arg), 0));
5134 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5135 if (code == TRUTH_AND_EXPR
5136 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5137 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5138 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5139 return build2 (EQ_EXPR, truth_type,
5140 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5142 build_int_cst (TREE_TYPE (ll_arg), 0));
5144 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5146 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5147 return build2 (code, truth_type, lhs, rhs);
/* See if the comparisons can be merged.  Then get all the parameters for
   each side.  */
5155 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5156 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5160 ll_inner = decode_field_reference (ll_arg,
5161 &ll_bitsize, &ll_bitpos, &ll_mode,
5162 &ll_unsignedp, &volatilep, &ll_mask,
5164 lr_inner = decode_field_reference (lr_arg,
5165 &lr_bitsize, &lr_bitpos, &lr_mode,
5166 &lr_unsignedp, &volatilep, &lr_mask,
5168 rl_inner = decode_field_reference (rl_arg,
5169 &rl_bitsize, &rl_bitpos, &rl_mode,
5170 &rl_unsignedp, &volatilep, &rl_mask,
5172 rr_inner = decode_field_reference (rr_arg,
5173 &rr_bitsize, &rr_bitpos, &rr_mode,
5174 &rr_unsignedp, &volatilep, &rr_mask,
/* The inner operation on the lhs of each comparison must be the same
   if we are to be able to do anything.  Then see if we have constants.
   If not, the same must be true for the rhs.  */
5181 if (volatilep || ll_inner == 0 || rl_inner == 0
5182 || ! operand_equal_p (ll_inner, rl_inner, 0))
5185 if (TREE_CODE (lr_arg) == INTEGER_CST
5186 && TREE_CODE (rr_arg) == INTEGER_CST)
5187 l_const = lr_arg, r_const = rr_arg;
5188 else if (lr_inner == 0 || rr_inner == 0
5189 || ! operand_equal_p (lr_inner, rr_inner, 0))
5192 l_const = r_const = 0;
5194 /* If either comparison code is not correct for our logical operation,
5195 fail. However, we can convert a one-bit comparison against zero into
5196 the opposite comparison against that bit being set in the field. */
5198 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5199 if (lcode != wanted_code)
5201 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5203 /* Make the left operand unsigned, since we are only interested
in the value of one bit.  Otherwise we are doing the wrong thing
below.  */
5213 /* This is analogous to the code for l_const above. */
5214 if (rcode != wanted_code)
5216 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5225 /* After this point all optimizations will generate bit-field
5226 references, which we might not want. */
5227 if (! lang_hooks.can_use_bit_fields_p ())
5230 /* See if we can find a mode that contains both fields being compared on
5231 the left. If we can't, fail. Otherwise, update all constants and masks
5232 to be relative to a field of that size. */
5233 first_bit = MIN (ll_bitpos, rl_bitpos);
5234 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5235 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5236 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5238 if (lnmode == VOIDmode)
5241 lnbitsize = GET_MODE_BITSIZE (lnmode);
5242 lnbitpos = first_bit & ~ (lnbitsize - 1);
5243 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5244 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5246 if (BYTES_BIG_ENDIAN)
5248 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5249 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5252 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5253 size_int (xll_bitpos), 0);
5254 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5255 size_int (xrl_bitpos), 0);
5259 l_const = fold_convert (lntype, l_const);
5260 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5261 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5262 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5263 fold_build1 (BIT_NOT_EXPR,
5267 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5269 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5274 r_const = fold_convert (lntype, r_const);
5275 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5276 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5277 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5278 fold_build1 (BIT_NOT_EXPR,
5282 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5284 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
/* If the right sides are not constant, do the same for them.  Also,
5289 disallow this optimization if a size or signedness mismatch occurs
5290 between the left and right sides. */
5293 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5294 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5295 /* Make sure the two fields on the right
5296 correspond to the left without being swapped. */
5297 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5300 first_bit = MIN (lr_bitpos, rr_bitpos);
5301 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5302 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5303 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5305 if (rnmode == VOIDmode)
5308 rnbitsize = GET_MODE_BITSIZE (rnmode);
5309 rnbitpos = first_bit & ~ (rnbitsize - 1);
5310 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5311 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5313 if (BYTES_BIG_ENDIAN)
5315 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5316 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5319 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5320 size_int (xlr_bitpos), 0);
5321 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5322 size_int (xrr_bitpos), 0);
5324 /* Make a mask that corresponds to both fields being compared.
5325 Do this for both items being compared. If the operands are the
5326 same size and the bits being compared are in the same position
5327 then we can do this by masking both and comparing the masked
5329 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5330 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5331 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5333 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5334 ll_unsignedp || rl_unsignedp);
5335 if (! all_ones_mask_p (ll_mask, lnbitsize))
5336 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5338 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5339 lr_unsignedp || rr_unsignedp);
5340 if (! all_ones_mask_p (lr_mask, rnbitsize))
5341 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5343 return build2 (wanted_code, truth_type, lhs, rhs);
5346 /* There is still another way we can do something: If both pairs of
5347 fields being compared are adjacent, we may be able to make a wider
5348 field containing them both.
5350 Note that we still must mask the lhs/rhs expressions. Furthermore,
5351 the mask must be shifted to account for the shift done by
5352 make_bit_field_ref. */
5353 if ((ll_bitsize + ll_bitpos == rl_bitpos
5354 && lr_bitsize + lr_bitpos == rr_bitpos)
5355 || (ll_bitpos == rl_bitpos + rl_bitsize
5356 && lr_bitpos == rr_bitpos + rr_bitsize))
5360 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5361 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5362 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5363 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5365 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5366 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5367 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5368 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5370 /* Convert to the smaller type before masking out unwanted bits. */
5372 if (lntype != rntype)
5374 if (lnbitsize > rnbitsize)
5376 lhs = fold_convert (rntype, lhs);
5377 ll_mask = fold_convert (rntype, ll_mask);
5380 else if (lnbitsize < rnbitsize)
5382 rhs = fold_convert (lntype, rhs);
5383 lr_mask = fold_convert (lntype, lr_mask);
5388 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5389 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5391 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5392 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5394 return build2 (wanted_code, truth_type, lhs, rhs);
5400 /* Handle the case of comparisons with constants. If there is something in
5401 common between the masks, those bits of the constants must be the same.
5402 If not, the condition is always false. Test for this to avoid generating
5403 incorrect code below. */
5404 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5405 if (! integer_zerop (result)
5406 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5407 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5409 if (wanted_code == NE_EXPR)
5411 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5412 return constant_boolean_node (true, truth_type);
5416 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5417 return constant_boolean_node (false, truth_type);
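/* For example, "(x & 3) == 1 && (x & 3) == 2" reaches this point with
   overlapping masks but different constants, and so folds to false.  */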
5421 /* Construct the expression we will return. First get the component
5422 reference we will make. Unless the mask is all ones the width of
that field, perform the mask operation.  Then compare with the
merged constant.  */
5425 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5426 ll_unsignedp || rl_unsignedp);
5428 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5429 if (! all_ones_mask_p (ll_mask, lnbitsize))
5430 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5432 return build2 (wanted_code, truth_type, result,
5433 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */
5440 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5443 enum tree_code op_code;
5444 tree comp_const = op1;
5446 int consts_equal, consts_lt;
5449 STRIP_SIGN_NOPS (arg0);
5451 op_code = TREE_CODE (arg0);
5452 minmax_const = TREE_OPERAND (arg0, 1);
5453 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5454 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5455 inner = TREE_OPERAND (arg0, 0);
5457 /* If something does not permit us to optimize, return the original tree. */
5458 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5459 || TREE_CODE (comp_const) != INTEGER_CST
5460 || TREE_CONSTANT_OVERFLOW (comp_const)
5461 || TREE_CODE (minmax_const) != INTEGER_CST
5462 || TREE_CONSTANT_OVERFLOW (minmax_const))
5465 /* Now handle all the various comparison codes. We only handle EQ_EXPR
and GT_EXPR, doing the rest with recursive calls using logical
simplifications.  */
5470 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5472 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5475 return invert_truthvalue (tem);
5481 fold_build2 (TRUTH_ORIF_EXPR, type,
5482 optimize_minmax_comparison
5483 (EQ_EXPR, type, arg0, comp_const),
5484 optimize_minmax_comparison
5485 (GT_EXPR, type, arg0, comp_const));
5488 if (op_code == MAX_EXPR && consts_equal)
5489 /* MAX (X, 0) == 0 -> X <= 0 */
5490 return fold_build2 (LE_EXPR, type, inner, comp_const);
5492 else if (op_code == MAX_EXPR && consts_lt)
5493 /* MAX (X, 0) == 5 -> X == 5 */
5494 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5496 else if (op_code == MAX_EXPR)
5497 /* MAX (X, 0) == -1 -> false */
5498 return omit_one_operand (type, integer_zero_node, inner);
5500 else if (consts_equal)
5501 /* MIN (X, 0) == 0 -> X >= 0 */
5502 return fold_build2 (GE_EXPR, type, inner, comp_const);
5505 /* MIN (X, 0) == 5 -> false */
5506 return omit_one_operand (type, integer_zero_node, inner);
5509 /* MIN (X, 0) == -1 -> X == -1 */
5510 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5513 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5514 /* MAX (X, 0) > 0 -> X > 0
5515 MAX (X, 0) > 5 -> X > 5 */
5516 return fold_build2 (GT_EXPR, type, inner, comp_const);
5518 else if (op_code == MAX_EXPR)
5519 /* MAX (X, 0) > -1 -> true */
5520 return omit_one_operand (type, integer_one_node, inner);
5522 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5523 /* MIN (X, 0) > 0 -> false
5524 MIN (X, 0) > 5 -> false */
5525 return omit_one_operand (type, integer_zero_node, inner);
5528 /* MIN (X, 0) > -1 -> X > -1 */
5529 return fold_build2 (GT_EXPR, type, inner, comp_const);
/* T is an integer expression that is being multiplied, divided, or
   reduced modulo a constant C (CODE says which operation, and what kind
   of divide or modulus).  See if we can eliminate that operation
   by folding it with
5539 other operations already in T. WIDE_TYPE, if non-null, is a type that
5540 should be used for the computation if wider than our type.
5542 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5543 (X * 2) + (Y * 4). We must, however, be assured that either the original
5544 expression would not overflow or that overflow is undefined for the type
5545 in the language in question.
5547 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5548 the machine has a multiply-accumulate insn or that this is part of an
5549 addressing calculation.
5551 If we return a non-null expression, it is an equivalent form of the
5552 original computation, but need not be in the original type.
5554 We set *STRICT_OVERFLOW_P to true if the return values depends on
5555 signed overflow being undefined. Otherwise we do not change
5556 *STRICT_OVERFLOW_P. */
5559 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5560 bool *strict_overflow_p)
5562 /* To avoid exponential search depth, refuse to allow recursion past
5563 three levels. Beyond that (1) it's highly unlikely that we'll find
5564 something interesting and (2) we've probably processed it before
5565 when we built the inner expression. */
5574 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5581 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5582 bool *strict_overflow_p)
5584 tree type = TREE_TYPE (t);
5585 enum tree_code tcode = TREE_CODE (t);
5586 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5587 > GET_MODE_SIZE (TYPE_MODE (type)))
5588 ? wide_type : type);
5590 int same_p = tcode == code;
5591 tree op0 = NULL_TREE, op1 = NULL_TREE;
5592 bool sub_strict_overflow_p;
5594 /* Don't deal with constants of zero here; they confuse the code below. */
5595 if (integer_zerop (c))
5598 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5599 op0 = TREE_OPERAND (t, 0);
5601 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5602 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5604 /* Note that we need not handle conditional operations here since fold
5605 already handles those cases. So just do arithmetic here. */
5609 /* For a constant, we can always simplify if we are a multiply
5610 or (for divide and modulus) if it is a multiple of our constant. */
5611 if (code == MULT_EXPR
5612 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5613 return const_binop (code, fold_convert (ctype, t),
5614 fold_convert (ctype, c), 0);
5617 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5618 /* If op0 is an expression ... */
5619 if ((COMPARISON_CLASS_P (op0)
5620 || UNARY_CLASS_P (op0)
5621 || BINARY_CLASS_P (op0)
5622 || EXPRESSION_CLASS_P (op0))
5623 /* ... and is unsigned, and its type is smaller than ctype,
5624 then we cannot pass through as widening. */
5625 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5626 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5627 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5628 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5629 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5630 /* ... or this is a truncation (t is narrower than op0),
5631 then we cannot pass through this narrowing. */
5632 || (GET_MODE_SIZE (TYPE_MODE (type))
5633 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5634 /* ... or signedness changes for division or modulus,
5635 then we cannot pass through this conversion. */
5636 || (code != MULT_EXPR
5637 && (TYPE_UNSIGNED (ctype)
5638 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5641 /* Pass the constant down and see if we can make a simplification. If
5642 we can, replace this expression with the inner simplification for
5643 possible later conversion to our or some other type. */
5644 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5645 && TREE_CODE (t2) == INTEGER_CST
5646 && ! TREE_CONSTANT_OVERFLOW (t2)
5647 && (0 != (t1 = extract_muldiv (op0, t2, code,
5649 ? ctype : NULL_TREE,
5650 strict_overflow_p))))
5655 /* If widening the type changes it from signed to unsigned, then we
5656 must avoid building ABS_EXPR itself as unsigned. */
5657 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5659 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5660 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5663 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5664 return fold_convert (ctype, t1);
5668 /* If the constant is negative, we cannot simplify this. */
5669 if (tree_int_cst_sgn (c) == -1)
5673 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5675 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5678 case MIN_EXPR: case MAX_EXPR:
5679 /* If widening the type changes the signedness, then we can't perform
5680 this optimization as that changes the result. */
5681 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5684 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5685 sub_strict_overflow_p = false;
5686 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5687 &sub_strict_overflow_p)) != 0
5688 && (t2 = extract_muldiv (op1, c, code, wide_type,
5689 &sub_strict_overflow_p)) != 0)
5691 if (tree_int_cst_sgn (c) < 0)
5692 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5693 if (sub_strict_overflow_p)
5694 *strict_overflow_p = true;
5695 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5696 fold_convert (ctype, t2));
5700 case LSHIFT_EXPR: case RSHIFT_EXPR:
/* If the second operand is constant, this is a multiplication
   or floor division by a power of two, so we can treat it that
   way unless the multiplier or divisor overflows.  Signed
   left-shift overflow is implementation-defined rather than
   undefined in C90, so do not convert signed left shift into
   multiplication.  */
5707 if (TREE_CODE (op1) == INTEGER_CST
5708 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5709 /* const_binop may not detect overflow correctly,
5710 so check for it explicitly here. */
5711 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5712 && TREE_INT_CST_HIGH (op1) == 0
5713 && 0 != (t1 = fold_convert (ctype,
5714 const_binop (LSHIFT_EXPR,
5717 && ! TREE_OVERFLOW (t1))
5718 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5719 ? MULT_EXPR : FLOOR_DIV_EXPR,
5720 ctype, fold_convert (ctype, op0), t1),
5721 c, code, wide_type, strict_overflow_p);
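/* For instance, an unsigned x << 3 is re-expressed as x * 8, and
   x >> 2 as the floor division x / 4, before recursing on the
   equivalent form.  */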
5724 case PLUS_EXPR: case MINUS_EXPR:
5725 /* See if we can eliminate the operation on both sides. If we can, we
5726 can return a new PLUS or MINUS. If we can't, the only remaining
cases where we can do anything are if the second operand is a
constant.  */
5729 sub_strict_overflow_p = false;
5730 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5731 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5732 if (t1 != 0 && t2 != 0
5733 && (code == MULT_EXPR
5734 /* If not multiplication, we can only do this if both operands
5735 are divisible by c. */
5736 || (multiple_of_p (ctype, op0, c)
5737 && multiple_of_p (ctype, op1, c))))
5739 if (sub_strict_overflow_p)
5740 *strict_overflow_p = true;
5741 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5742 fold_convert (ctype, t2));
5745 /* If this was a subtraction, negate OP1 and set it to be an addition.
5746 This simplifies the logic below. */
5747 if (tcode == MINUS_EXPR)
5748 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5750 if (TREE_CODE (op1) != INTEGER_CST)
/* If either OP1 or C is negative, this optimization is not safe for
   some of the division and remainder types, while for others we need
   to change the code.  */
5756 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5758 if (code == CEIL_DIV_EXPR)
5759 code = FLOOR_DIV_EXPR;
5760 else if (code == FLOOR_DIV_EXPR)
5761 code = CEIL_DIV_EXPR;
5762 else if (code != MULT_EXPR
5763 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5767 /* If it's a multiply or a division/modulus operation of a multiple
5768 of our constant, do the operation and verify it doesn't overflow. */
5769 if (code == MULT_EXPR
5770 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5772 op1 = const_binop (code, fold_convert (ctype, op1),
5773 fold_convert (ctype, c), 0);
5774 /* We allow the constant to overflow with wrapping semantics. */
5776 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
/* If we have an unsigned type that is not a sizetype, we cannot widen
5783 the operation since it will change the result if the original
5784 computation overflowed. */
5785 if (TYPE_UNSIGNED (ctype)
5786 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5790 /* If we were able to eliminate our operation from the first side,
5791 apply our operation to the second side and reform the PLUS. */
5792 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5793 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5795 /* The last case is if we are a multiply. In that case, we can
5796 apply the distributive law to commute the multiply and addition
5797 if the multiplication of the constants doesn't overflow. */
5798 if (code == MULT_EXPR)
5799 return fold_build2 (tcode, ctype,
5800 fold_build2 (code, ctype,
5801 fold_convert (ctype, op0),
5802 fold_convert (ctype, c)),
5808 /* We have a special case here if we are doing something like
5809 (C * 8) % 4 since we know that's zero. */
5810 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5811 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5812 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5813 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5814 return omit_one_operand (type, integer_zero_node, op0);
5816 /* ... fall through ... */
5818 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5819 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5820 /* If we can extract our operation from the LHS, do so and return a
5821 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5822 do something only if the second operand is a constant. */
5824 && (t1 = extract_muldiv (op0, c, code, wide_type,
5825 strict_overflow_p)) != 0)
5826 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5827 fold_convert (ctype, op1));
5828 else if (tcode == MULT_EXPR && code == MULT_EXPR
5829 && (t1 = extract_muldiv (op1, c, code, wide_type,
5830 strict_overflow_p)) != 0)
5831 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5832 fold_convert (ctype, t1));
5833 else if (TREE_CODE (op1) != INTEGER_CST)
5836 /* If these are the same operation types, we can associate them
5837 assuming no overflow. */
5839 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5840 fold_convert (ctype, c), 0))
5841 && ! TREE_OVERFLOW (t1))
5842 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5844 /* If these operations "cancel" each other, we have the main
5845 optimizations of this pass, which occur when either constant is a
multiple of the other, in which case we replace this with an
operation of either CODE or TCODE.
5849 If we have an unsigned type that is not a sizetype, we cannot do
this since it will change the result if the original computation
overflowed.  */
5852 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5853 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5854 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5855 || (tcode == MULT_EXPR
5856 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5857 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5859 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5861 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5862 *strict_overflow_p = true;
5863 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5864 fold_convert (ctype,
5865 const_binop (TRUNC_DIV_EXPR,
5868 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5870 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5871 *strict_overflow_p = true;
5872 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5873 fold_convert (ctype,
5874 const_binop (TRUNC_DIV_EXPR,
5887 /* Return a node which has the indicated constant VALUE (either 0 or
5888 1), and is of the indicated TYPE. */
5891 constant_boolean_node (int value, tree type)
5893 if (type == integer_type_node)
5894 return value ? integer_one_node : integer_zero_node;
5895 else if (type == boolean_type_node)
5896 return value ? boolean_true_node : boolean_false_node;
5898 return build_int_cst (type, value);
5902 /* Return true if expr looks like an ARRAY_REF and set base and
5903 offset to the appropriate trees. If there is no offset,
5904 offset is set to NULL_TREE. Base will be canonicalized to
5905 something you can get the element type from using
5906 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5907 in bytes to the base. */
5910 extract_array_ref (tree expr, tree *base, tree *offset)
5912 /* One canonical form is a PLUS_EXPR with the first
5913 argument being an ADDR_EXPR with a possible NOP_EXPR
5915 if (TREE_CODE (expr) == PLUS_EXPR)
5917 tree op0 = TREE_OPERAND (expr, 0);
5918 tree inner_base, dummy1;
5919 /* Strip NOP_EXPRs here because the C frontends and/or
folders may present us with (int *)&x.a + 4B.  */
5922 if (extract_array_ref (op0, &inner_base, &dummy1))
5925 if (dummy1 == NULL_TREE)
5926 *offset = TREE_OPERAND (expr, 1);
5928 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5929 dummy1, TREE_OPERAND (expr, 1));
/* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5934 which we transform into an ADDR_EXPR with appropriate
5935 offset. For other arguments to the ADDR_EXPR we assume
5936 zero offset and as such do not care about the ADDR_EXPR
5937 type and strip possible nops from it. */
5938 else if (TREE_CODE (expr) == ADDR_EXPR)
5940 tree op0 = TREE_OPERAND (expr, 0);
5941 if (TREE_CODE (op0) == ARRAY_REF)
5943 tree idx = TREE_OPERAND (op0, 1);
5944 *base = TREE_OPERAND (op0, 0);
5945 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5946 array_ref_element_size (op0));
5950 /* Handle array-to-pointer decay as &a. */
5951 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5952 *base = TREE_OPERAND (expr, 0);
5955 *offset = NULL_TREE;
5959 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5960 else if (SSA_VAR_P (expr)
5961 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5964 *offset = NULL_TREE;
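/* For example, for &a[3] with 4-byte array elements, *base is set to
   "a" and *offset to the byte offset 3 * 4 = 12; for a bare pointer p,
   *base is p and *offset is NULL_TREE.  */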
5972 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5974 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5975 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5976 COND is the first argument to CODE; otherwise (as in the example
5977 given here), it is the second argument. TYPE is the type of the
original expression.  Return NULL_TREE if no simplification is
possible.  */
5982 fold_binary_op_with_conditional_arg (enum tree_code code,
5983 tree type, tree op0, tree op1,
5984 tree cond, tree arg, int cond_first_p)
5986 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5987 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5988 tree test, true_value, false_value;
5989 tree lhs = NULL_TREE;
5990 tree rhs = NULL_TREE;
5992 /* This transformation is only worthwhile if we don't have to wrap
5993 arg in a SAVE_EXPR, and the operation can be simplified on at least
one of the branches once it's pushed inside the COND_EXPR.  */
5995 if (!TREE_CONSTANT (arg))
5998 if (TREE_CODE (cond) == COND_EXPR)
6000 test = TREE_OPERAND (cond, 0);
6001 true_value = TREE_OPERAND (cond, 1);
6002 false_value = TREE_OPERAND (cond, 2);
/* If this operand is an expression that cannot produce a value, such
   as a throw expression, then it does not make sense to try to perform
   a logical or arithmetic operation involving it.  */
6006 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6008 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6013 tree testtype = TREE_TYPE (cond);
6015 true_value = constant_boolean_node (true, testtype);
6016 false_value = constant_boolean_node (false, testtype);
6019 arg = fold_convert (arg_type, arg);
6022 true_value = fold_convert (cond_type, true_value);
6024 lhs = fold_build2 (code, type, true_value, arg);
6026 lhs = fold_build2 (code, type, arg, true_value);
6030 false_value = fold_convert (cond_type, false_value);
6032 rhs = fold_build2 (code, type, false_value, arg);
6034 rhs = fold_build2 (code, type, arg, false_value);
6037 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6038 return fold_convert (type, test);
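/* For instance, "5 + (b ? x : y)" becomes "b ? (5 + x) : (5 + y)",
   after which each arm may fold further.  */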
6042 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6044 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6045 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6046 ADDEND is the same as X.
6048 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6049 and finite. The problematic cases are when X is zero, and its mode
6050 has signed zeros. In the case of rounding towards -infinity,
6051 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6052 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6055 fold_real_zero_addition_p (tree type, tree addend, int negate)
6057 if (!real_zerop (addend))
6060 /* Don't allow the fold with -fsignaling-nans. */
6061 if (HONOR_SNANS (TYPE_MODE (type)))
6064 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6065 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6068 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6069 if (TREE_CODE (addend) == REAL_CST
6070 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6073 /* The mode has signed zeros, and we have to honor their sign.
6074 In this situation, there is only one case we can return true for.
6075 X - 0 is the same as X unless rounding towards -infinity is
6077 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
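/* For example, with signed zeros honored and default rounding,
   X + 0.0 cannot be folded to X, since (-0.0) + 0.0 is +0.0, but
   X - 0.0 can be, as subtraction preserves the sign of X; rounding
   towards -infinity breaks even that, since 0.0 - 0.0 is then -0.0.  */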
6080 /* Subroutine of fold() that checks comparisons of built-in math
6081 functions against real constants.
6083 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6084 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6085 is the type of the result and ARG0 and ARG1 are the operands of the
6086 comparison. ARG1 must be a TREE_REAL_CST.
6088 The function returns the constant folded tree if a simplification
6089 can be made, and NULL_TREE otherwise. */
6092 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6093 tree type, tree arg0, tree arg1)
6097 if (BUILTIN_SQRT_P (fcode))
6099 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6100 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6102 c = TREE_REAL_CST (arg1);
6103 if (REAL_VALUE_NEGATIVE (c))
6105 /* sqrt(x) < y is always false, if y is negative. */
6106 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6107 return omit_one_operand (type, integer_zero_node, arg);
6109 /* sqrt(x) > y is always true, if y is negative and we
6110 don't care about NaNs, i.e. negative values of x. */
6111 if (code == NE_EXPR || !HONOR_NANS (mode))
6112 return omit_one_operand (type, integer_one_node, arg);
6114 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6115 return fold_build2 (GE_EXPR, type, arg,
6116 build_real (TREE_TYPE (arg), dconst0));
6118 else if (code == GT_EXPR || code == GE_EXPR)
6122 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6123 real_convert (&c2, mode, &c2);
6125 if (REAL_VALUE_ISINF (c2))
6127 /* sqrt(x) > y is x == +Inf, when y is very large. */
6128 if (HONOR_INFINITIES (mode))
6129 return fold_build2 (EQ_EXPR, type, arg,
6130 build_real (TREE_TYPE (arg), c2));
6132 /* sqrt(x) > y is always false, when y is very large
6133 and we don't care about infinities. */
6134 return omit_one_operand (type, integer_zero_node, arg);
6137 /* sqrt(x) > c is the same as x > c*c. */
6138 return fold_build2 (code, type, arg,
6139 build_real (TREE_TYPE (arg), c2));
6141 else if (code == LT_EXPR || code == LE_EXPR)
6145 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6146 real_convert (&c2, mode, &c2);
6148 if (REAL_VALUE_ISINF (c2))
6150 /* sqrt(x) < y is always true, when y is a very large
6151 value and we don't care about NaNs or Infinities. */
6152 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6153 return omit_one_operand (type, integer_one_node, arg);
6155 /* sqrt(x) < y is x != +Inf when y is very large and we
6156 don't care about NaNs. */
6157 if (! HONOR_NANS (mode))
6158 return fold_build2 (NE_EXPR, type, arg,
6159 build_real (TREE_TYPE (arg), c2));
6161 /* sqrt(x) < y is x >= 0 when y is very large and we
6162 don't care about Infinities. */
6163 if (! HONOR_INFINITIES (mode))
6164 return fold_build2 (GE_EXPR, type, arg,
6165 build_real (TREE_TYPE (arg), dconst0));
6167 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6168 if (lang_hooks.decls.global_bindings_p () != 0
6169 || CONTAINS_PLACEHOLDER_P (arg))
6172 arg = save_expr (arg);
6173 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6174 fold_build2 (GE_EXPR, type, arg,
6175 build_real (TREE_TYPE (arg),
6177 fold_build2 (NE_EXPR, type, arg,
6178 build_real (TREE_TYPE (arg),
6182 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6183 if (! HONOR_NANS (mode))
6184 return fold_build2 (code, type, arg,
6185 build_real (TREE_TYPE (arg), c2));
6187 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6188 if (lang_hooks.decls.global_bindings_p () == 0
6189 && ! CONTAINS_PLACEHOLDER_P (arg))
6191 arg = save_expr (arg);
6192 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6193 fold_build2 (GE_EXPR, type, arg,
6194 build_real (TREE_TYPE (arg),
6196 fold_build2 (code, type, arg,
6197 build_real (TREE_TYPE (arg),
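/* For instance, "sqrt (x) > 2.0" folds to x > 4.0; and when NaNs
   must be honored, "sqrt (x) < 3.0" can fold to
   x >= 0.0 && x < 9.0.  */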
6206 /* Subroutine of fold() that optimizes comparisons against Infinities,
6207 either +Inf or -Inf.
6209 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6210 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6211 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6213 The function returns the constant folded tree if a simplification
6214 can be made, and NULL_TREE otherwise. */
6217 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6219 enum machine_mode mode;
6220 REAL_VALUE_TYPE max;
6224 mode = TYPE_MODE (TREE_TYPE (arg0));
6226 /* For negative infinity swap the sense of the comparison. */
6227 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6229 code = swap_tree_comparison (code);
/* x > +Inf is always false, if we ignore sNaNs.  */
6235 if (HONOR_SNANS (mode))
6237 return omit_one_operand (type, integer_zero_node, arg0);
6240 /* x <= +Inf is always true, if we don't care about NaNs. */
6241 if (! HONOR_NANS (mode))
6242 return omit_one_operand (type, integer_one_node, arg0);
6244 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6245 if (lang_hooks.decls.global_bindings_p () == 0
6246 && ! CONTAINS_PLACEHOLDER_P (arg0))
6248 arg0 = save_expr (arg0);
6249 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6255 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6256 real_maxval (&max, neg, mode);
6257 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6258 arg0, build_real (TREE_TYPE (arg0), max));
6261 /* x < +Inf is always equal to x <= DBL_MAX. */
6262 real_maxval (&max, neg, mode);
6263 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6264 arg0, build_real (TREE_TYPE (arg0), max));
6267 /* x != +Inf is always equal to !(x > DBL_MAX). */
6268 real_maxval (&max, neg, mode);
6269 if (! HONOR_NANS (mode))
6270 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6271 arg0, build_real (TREE_TYPE (arg0), max));
6273 /* The transformation below creates non-gimple code and thus is
6274 not appropriate if we are in gimple form. */
6278 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6279 arg0, build_real (TREE_TYPE (arg0), max));
6280 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
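/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): comparisons against +Inf collapse into comparisons against
   the largest finite value, per fold_inf_compare above.  */
#if 0
#include <float.h>
#include <math.h>

static int
inf_cmp_demo (double x)
{
  /* x < +Inf becomes x <= DBL_MAX (NaN is false either way);
     x > +Inf folds to constant 0 once sNaNs are ignored.  */
  return x < INFINITY;
}
#endif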
6289 /* Subroutine of fold() that optimizes comparisons of a division by
6290 a nonzero integer constant against an integer constant, i.e.
6293 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6294 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6295 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6297 The function returns the constant folded tree if a simplification
6298 can be made, and NULL_TREE otherwise. */
6301 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6303 tree prod, tmp, hi, lo;
6304 tree arg00 = TREE_OPERAND (arg0, 0);
6305 tree arg01 = TREE_OPERAND (arg0, 1);
6306 unsigned HOST_WIDE_INT lpart;
6307 HOST_WIDE_INT hpart;
6308 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6312 /* We have to do this the hard way to detect unsigned overflow.
6313 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6314 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6315 TREE_INT_CST_HIGH (arg01),
6316 TREE_INT_CST_LOW (arg1),
6317 TREE_INT_CST_HIGH (arg1),
6318 &lpart, &hpart, unsigned_p);
6319 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6320 prod = force_fit_type (prod, -1, overflow, false);
6321 neg_overflow = false;
6325 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6328 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6329 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6330 TREE_INT_CST_HIGH (prod),
6331 TREE_INT_CST_LOW (tmp),
6332 TREE_INT_CST_HIGH (tmp),
6333 &lpart, &hpart, unsigned_p);
6334 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6335 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6336 TREE_CONSTANT_OVERFLOW (prod));
6338 else if (tree_int_cst_sgn (arg01) >= 0)
6340 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6341 switch (tree_int_cst_sgn (arg1))
6344 neg_overflow = true;
6345 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6350 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6355 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6365 /* A negative divisor reverses the relational operators. */
6366 code = swap_tree_comparison (code);
6368 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6369 switch (tree_int_cst_sgn (arg1))
6372 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6377 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6382 neg_overflow = true;
6383 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6395 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6396 return omit_one_operand (type, integer_zero_node, arg00);
6397 if (TREE_OVERFLOW (hi))
6398 return fold_build2 (GE_EXPR, type, arg00, lo);
6399 if (TREE_OVERFLOW (lo))
6400 return fold_build2 (LE_EXPR, type, arg00, hi);
6401 return build_range_check (type, arg00, 1, lo, hi);
6404 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6405 return omit_one_operand (type, integer_one_node, arg00);
6406 if (TREE_OVERFLOW (hi))
6407 return fold_build2 (LT_EXPR, type, arg00, lo);
6408 if (TREE_OVERFLOW (lo))
6409 return fold_build2 (GT_EXPR, type, arg00, hi);
6410 return build_range_check (type, arg00, 0, lo, hi);
6413 if (TREE_OVERFLOW (lo))
6415 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6416 return omit_one_operand (type, tmp, arg00);
6418 return fold_build2 (LT_EXPR, type, arg00, lo);
6421 if (TREE_OVERFLOW (hi))
6423 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6424 return omit_one_operand (type, tmp, arg00);
6426 return fold_build2 (LE_EXPR, type, arg00, hi);
6429 if (TREE_OVERFLOW (hi))
6431 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6432 return omit_one_operand (type, tmp, arg00);
6434 return fold_build2 (GT_EXPR, type, arg00, hi);
6437 if (TREE_OVERFLOW (lo))
6439 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6440 return omit_one_operand (type, tmp, arg00);
6442 return fold_build2 (GE_EXPR, type, arg00, lo);
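/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): fold_div_compare turns a compare of a truncating division
   into a range check on the dividend.  */
#if 0
static int
div_cmp_demo (int x)
{
  /* x / 3 == 2 holds exactly for 6 <= x && x <= 8, so the division
     is replaced by the range check built above.  */
  return x / 3 == 2;
}
#endif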
6452 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6453 equality/inequality test, then return a simplified form of the test
6454 using a sign test. Otherwise return NULL. TYPE is the desired
6458 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6461 /* If this is testing a single bit, we can optimize the test. */
6462 if ((code == NE_EXPR || code == EQ_EXPR)
6463 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6464 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6466 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6467 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6468 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6470 if (arg00 != NULL_TREE
6471 /* This is only a win if casting to a signed type is cheap,
6472 i.e. when arg00's type is not a partial mode. */
6473 && TYPE_PRECISION (TREE_TYPE (arg00))
6474 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6476 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6477 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6478 result_type, fold_convert (stype, arg00),
6479 build_int_cst (stype, 0));
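/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0, and assuming a 32-bit int): when the tested bit is the sign
   bit of the operand's type, the bit test becomes a signed compare
   against zero.  */
#if 0
static int
sign_bit_demo (int x)
{
  /* Conceptually folded to x < 0; the == 0 form becomes x >= 0.  */
  return (x & (1u << 31)) != 0;
}
#endif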
6486 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6487 equality/inequality test, then return a simplified form of
6488 the test using shifts and logical operations. Otherwise return
6489 NULL. TYPE is the desired result type. */
6492 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6495 /* If this is testing a single bit, we can optimize the test. */
6496 if ((code == NE_EXPR || code == EQ_EXPR)
6497 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6498 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6500 tree inner = TREE_OPERAND (arg0, 0);
6501 tree type = TREE_TYPE (arg0);
6502 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6503 enum machine_mode operand_mode = TYPE_MODE (type);
6505 tree signed_type, unsigned_type, intermediate_type;
6508 /* First, see if we can fold the single bit test into a sign-bit
6510 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6515 /* Otherwise we have (A & C) != 0 where C is a single bit,
6516 convert that into ((A >> C2) & 1), where C2 = log2(C).
6517 Similarly for (A & C) == 0. */
6519 /* If INNER is a right shift of a constant and it plus BITNUM does
6520 not overflow, adjust BITNUM and INNER. */
6521 if (TREE_CODE (inner) == RSHIFT_EXPR
6522 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6523 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6524 && bitnum < TYPE_PRECISION (type)
6525 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6526 bitnum - TYPE_PRECISION (type)))
6528 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6529 inner = TREE_OPERAND (inner, 0);
6532 /* If we are going to be able to omit the AND below, we must do our
6533 operations as unsigned. If we must use the AND, we have a choice.
6534 Normally unsigned is faster, but for some machines signed is. */
6535 #ifdef LOAD_EXTEND_OP
6536 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6537 && !flag_syntax_only) ? 0 : 1;
6542 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6543 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6544 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6545 inner = fold_convert (intermediate_type, inner);
6548 inner = build2 (RSHIFT_EXPR, intermediate_type,
6549 inner, size_int (bitnum));
6551 if (code == EQ_EXPR)
6552 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6553 inner, integer_one_node);
6555 /* Put the AND last so it can combine with more things. */
6556 inner = build2 (BIT_AND_EXPR, intermediate_type,
6557 inner, integer_one_node);
6559 /* Make sure to return the proper type. */
6560 inner = fold_convert (result_type, inner);
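/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): the general single-bit test above, rewritten as shift and
   mask.  */
#if 0
static int
single_bit_demo (unsigned x)
{
  /* (x & 8) != 0 folds to (x >> 3) & 1; the == 0 form additionally
     XORs the result with 1.  */
  return (x & 8) != 0;
}
#endif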
6567 /* Check whether we are allowed to reorder operands arg0 and arg1,
6568 such that the evaluation of arg1 occurs before arg0. */
6571 reorder_operands_p (tree arg0, tree arg1)
6573 if (! flag_evaluation_order)
6575 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6577 return ! TREE_SIDE_EFFECTS (arg0)
6578 && ! TREE_SIDE_EFFECTS (arg1);
6581 /* Test whether it is preferable to swap two operands, ARG0 and
6582 ARG1, for example because ARG0 is an integer constant and ARG1
6583 isn't. If REORDER is true, only recommend swapping if we can
6584 evaluate the operands in reverse order. */
6587 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6589 STRIP_SIGN_NOPS (arg0);
6590 STRIP_SIGN_NOPS (arg1);
6592 if (TREE_CODE (arg1) == INTEGER_CST)
6594 if (TREE_CODE (arg0) == INTEGER_CST)
6597 if (TREE_CODE (arg1) == REAL_CST)
6599 if (TREE_CODE (arg0) == REAL_CST)
6602 if (TREE_CODE (arg1) == COMPLEX_CST)
6604 if (TREE_CODE (arg0) == COMPLEX_CST)
6607 if (TREE_CONSTANT (arg1))
6609 if (TREE_CONSTANT (arg0))
6615 if (reorder && flag_evaluation_order
6616 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6624 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6625 for commutative and comparison operators. Ensuring a canonical
6626 form allows the optimizers to find additional redundancies without
6627 having to explicitly check for both orderings. */
6628 if (TREE_CODE (arg0) == SSA_NAME
6629 && TREE_CODE (arg1) == SSA_NAME
6630 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
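/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): the canonicalization driven by tree_swap_operands_p moves
   constants to the second operand.  */
#if 0
static int
swap_demo (int x)
{
  /* 5 < x is canonicalized to x > 5, so later folds only have to
     recognize one of the two spellings.  */
  return 5 < x;
}
#endif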
6636 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6637 ARG0 is extended to a wider type. */
6640 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6642 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6644 tree shorter_type, outer_type;
6648 if (arg0_unw == arg0)
6650 shorter_type = TREE_TYPE (arg0_unw);
6652 #ifdef HAVE_canonicalize_funcptr_for_compare
6653 /* Disable this optimization if we're casting a function pointer
6654 type on targets that require function pointer canonicalization. */
6655 if (HAVE_canonicalize_funcptr_for_compare
6656 && TREE_CODE (shorter_type) == POINTER_TYPE
6657 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6661 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6664 arg1_unw = get_unwidened (arg1, NULL_TREE);
6666 /* If possible, express the comparison in the shorter mode. */
6667 if ((code == EQ_EXPR || code == NE_EXPR
6668 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6669 && (TREE_TYPE (arg1_unw) == shorter_type
6670 || (TYPE_PRECISION (shorter_type)
6671 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6672 || (TREE_CODE (arg1_unw) == INTEGER_CST
6673 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6674 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6675 && int_fits_type_p (arg1_unw, shorter_type))))
6676 return fold_build2 (code, type, arg0_unw,
6677 fold_convert (shorter_type, arg1_unw));
6679 if (TREE_CODE (arg1_unw) != INTEGER_CST
6680 || TREE_CODE (shorter_type) != INTEGER_TYPE
6681 || !int_fits_type_p (arg1_unw, shorter_type))
6684 /* If we are comparing with an integer that does not fit into the range
6685 of the shorter type, the result is known. */
6686 outer_type = TREE_TYPE (arg1_unw);
6687 min = lower_bound_in_type (outer_type, shorter_type);
6688 max = upper_bound_in_type (outer_type, shorter_type);
6690 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6692 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6699 return omit_one_operand (type, integer_zero_node, arg0);
6704 return omit_one_operand (type, integer_one_node, arg0);
6710 return omit_one_operand (type, integer_one_node, arg0);
6712 return omit_one_operand (type, integer_zero_node, arg0);
6717 return omit_one_operand (type, integer_zero_node, arg0);
6719 return omit_one_operand (type, integer_one_node, arg0);
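/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): when the constant cannot fit in the unwidened type, the
   comparison above has a known result.  */
#if 0
static int
widened_cmp_demo (unsigned short s)
{
  /* 100000 exceeds USHRT_MAX for the usual 16-bit short, so this
     equality folds to constant 0.  */
  return (int) s == 100000;
}
#endif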
6728 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6729 conversion on ARG0 changes only its signedness. */
6732 fold_sign_changed_comparison (enum tree_code code, tree type,
6733 tree arg0, tree arg1)
6735 tree arg0_inner, tmp;
6736 tree inner_type, outer_type;
6738 if (TREE_CODE (arg0) != NOP_EXPR
6739 && TREE_CODE (arg0) != CONVERT_EXPR)
6742 outer_type = TREE_TYPE (arg0);
6743 arg0_inner = TREE_OPERAND (arg0, 0);
6744 inner_type = TREE_TYPE (arg0_inner);
6746 #ifdef HAVE_canonicalize_funcptr_for_compare
6747 /* Disable this optimization if we're casting a function pointer
6748 type on targets that require function pointer canonicalization. */
6749 if (HAVE_canonicalize_funcptr_for_compare
6750 && TREE_CODE (inner_type) == POINTER_TYPE
6751 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6755 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6758 if (TREE_CODE (arg1) != INTEGER_CST
6759 && !((TREE_CODE (arg1) == NOP_EXPR
6760 || TREE_CODE (arg1) == CONVERT_EXPR)
6761 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6764 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6769 if (TREE_CODE (arg1) == INTEGER_CST)
6771 tmp = build_int_cst_wide (inner_type,
6772 TREE_INT_CST_LOW (arg1),
6773 TREE_INT_CST_HIGH (arg1));
6774 arg1 = force_fit_type (tmp, 0,
6775 TREE_OVERFLOW (arg1),
6776 TREE_CONSTANT_OVERFLOW (arg1));
6779 arg1 = fold_convert (inner_type, arg1);
6781 return fold_build2 (code, type, arg0_inner, arg1);
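/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): a cast that changes only signedness is dropped from an
   equality once the constant is retyped.  */
#if 0
static int
sign_change_demo (int i)
{
  /* Same precision, only the signedness differs, and the code is
     EQ_EXPR, so this folds to i == 5.  */
  return (unsigned) i == 5u;
}
#endif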
6784 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6785 the step of the array. Reconstructs s and delta in the case of s * delta
6786 being an integer constant (and thus already folded).
6787 ADDR is the address. OP1 is the multiplicative expression.
6788 If the function succeeds, the new address expression is returned. Otherwise
6789 NULL_TREE is returned. */
6792 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6794 tree s, delta, step;
6795 tree ref = TREE_OPERAND (addr, 0), pref;
6799 /* Canonicalize op1 into a possibly non-constant delta
6800 and an INTEGER_CST s. */
6801 if (TREE_CODE (op1) == MULT_EXPR)
6803 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6808 if (TREE_CODE (arg0) == INTEGER_CST)
6813 else if (TREE_CODE (arg1) == INTEGER_CST)
6821 else if (TREE_CODE (op1) == INTEGER_CST)
6828 /* Treat op1 as delta * 1. */
6830 s = integer_one_node;
6833 for (;; ref = TREE_OPERAND (ref, 0))
6835 if (TREE_CODE (ref) == ARRAY_REF)
6837 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6841 step = array_ref_element_size (ref);
6842 if (TREE_CODE (step) != INTEGER_CST)
6847 if (! tree_int_cst_equal (step, s))
6852 /* Check whether delta is a multiple of step. */
6853 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6862 if (!handled_component_p (ref))
6866 /* We found a suitable array reference. Copy everything up to it,
6867 and replace the index. */
6869 pref = TREE_OPERAND (addr, 0);
6870 ret = copy_node (pref);
6875 pref = TREE_OPERAND (pref, 0);
6876 TREE_OPERAND (pos, 0) = copy_node (pref);
6877 pos = TREE_OPERAND (pos, 0);
6880 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6881 fold_convert (itype,
6882 TREE_OPERAND (pos, 1)),
6883 fold_convert (itype, delta));
6885 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
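/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): a byte offset that is a multiple of the element size is
   folded back into the array index.  */
#if 0
static int a[16];

static int *
mult_to_index_demo (int i, int k)
{
  /* At the tree level this is &a[i] plus k scaled by sizeof (int);
     the fold above may rewrite it as &a[i + k], keeping the access
     in ARRAY_REF form.  */
  return &a[i] + k;
}
#endif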
6889 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6890 means A >= Y && A != MAX, but in this case we know that
6891 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6894 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6896 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6898 if (TREE_CODE (bound) == LT_EXPR)
6899 a = TREE_OPERAND (bound, 0);
6900 else if (TREE_CODE (bound) == GT_EXPR)
6901 a = TREE_OPERAND (bound, 1);
6905 typea = TREE_TYPE (a);
6906 if (!INTEGRAL_TYPE_P (typea)
6907 && !POINTER_TYPE_P (typea))
6910 if (TREE_CODE (ineq) == LT_EXPR)
6912 a1 = TREE_OPERAND (ineq, 1);
6913 y = TREE_OPERAND (ineq, 0);
6915 else if (TREE_CODE (ineq) == GT_EXPR)
6917 a1 = TREE_OPERAND (ineq, 0);
6918 y = TREE_OPERAND (ineq, 1);
6923 if (TREE_TYPE (a1) != typea)
6926 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6927 if (!integer_onep (diff))
6930 return fold_build2 (GE_EXPR, type, a, y);
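/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): given the bound a < x, the strict a + 1 > y relaxes to the
   non-sharp a >= y.  */
#if 0
static int
nonsharp_demo (int a, int x, int y)
{
  /* Folds to a < x && a >= y; the bound a < x rules out a == INT_MAX,
     where a + 1 would overflow.  */
  return a < x && a + 1 > y;
}
#endif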
6933 /* Fold a sum or difference of at least one multiplication.
6934 Returns the folded tree or NULL if no simplification could be made. */
6937 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6939 tree arg00, arg01, arg10, arg11;
6940 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6942 /* (A * C) +- (B * C) -> (A+-B) * C.
6943 (A * C) +- A -> A * (C+-1).
6944 We are most concerned about the case where C is a constant,
6945 but other combinations show up during loop reduction. Since
6946 it is not difficult, try all four possibilities. */
6948 if (TREE_CODE (arg0) == MULT_EXPR)
6950 arg00 = TREE_OPERAND (arg0, 0);
6951 arg01 = TREE_OPERAND (arg0, 1);
6956 arg01 = build_one_cst (type);
6958 if (TREE_CODE (arg1) == MULT_EXPR)
6960 arg10 = TREE_OPERAND (arg1, 0);
6961 arg11 = TREE_OPERAND (arg1, 1);
6966 arg11 = build_one_cst (type);
6970 if (operand_equal_p (arg01, arg11, 0))
6971 same = arg01, alt0 = arg00, alt1 = arg10;
6972 else if (operand_equal_p (arg00, arg10, 0))
6973 same = arg00, alt0 = arg01, alt1 = arg11;
6974 else if (operand_equal_p (arg00, arg11, 0))
6975 same = arg00, alt0 = arg01, alt1 = arg10;
6976 else if (operand_equal_p (arg01, arg10, 0))
6977 same = arg01, alt0 = arg00, alt1 = arg11;
6979 /* No identical multiplicands; see if we can find a common
6980 power-of-two factor in non-power-of-two multiplies. This
6981 can help in multi-dimensional array access. */
6982 else if (host_integerp (arg01, 0)
6983 && host_integerp (arg11, 0))
6985 HOST_WIDE_INT int01, int11, tmp;
6988 int01 = TREE_INT_CST_LOW (arg01);
6989 int11 = TREE_INT_CST_LOW (arg11);
6991 /* Move min of absolute values to int11. */
6992 if ((int01 >= 0 ? int01 : -int01)
6993 < (int11 >= 0 ? int11 : -int11))
6995 tmp = int01, int01 = int11, int11 = tmp;
6996 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7003 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7005 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7006 build_int_cst (TREE_TYPE (arg00),
7011 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7016 return fold_build2 (MULT_EXPR, type,
7017 fold_build2 (code, type,
7018 fold_convert (type, alt0),
7019 fold_convert (type, alt1)),
7020 fold_convert (type, same));
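/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): distribution in reverse, per the comment at the top of
   fold_plusminus_mult_expr.  */
#if 0
static int
plusminus_mult_demo (int a, int b, int c, int x)
{
  int t1 = a * c + b * c;   /* (A * C) + (B * C) -> (A + B) * C */
  int t2 = x * 7 + x;       /* (A * C) + A -> A * (C + 1), i.e. x * 8 */
  return t1 + t2;
}
#endif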
7025 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7026 specified by EXPR into the buffer PTR of length LEN bytes.
7027 Return the number of bytes placed in the buffer, or zero
7031 native_encode_int (tree expr, unsigned char *ptr, int len)
7033 tree type = TREE_TYPE (expr);
7034 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7035 int byte, offset, word, words;
7036 unsigned char value;
7038 if (total_bytes > len)
7040 words = total_bytes / UNITS_PER_WORD;
7042 for (byte = 0; byte < total_bytes; byte++)
7044 int bitpos = byte * BITS_PER_UNIT;
7045 if (bitpos < HOST_BITS_PER_WIDE_INT)
7046 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7048 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7049 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7051 if (total_bytes > UNITS_PER_WORD)
7053 word = byte / UNITS_PER_WORD;
7054 if (WORDS_BIG_ENDIAN)
7055 word = (words - 1) - word;
7056 offset = word * UNITS_PER_WORD;
7057 if (BYTES_BIG_ENDIAN)
7058 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7060 offset += byte % UNITS_PER_WORD;
7063 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7064 ptr[offset] = value;
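/* Illustrative sketch, for exposition only (hypothetical standalone
   program, fenced with #if 0): on a little-endian host and target the
   byte order produced by native_encode_int matches a plain memcpy of
   the value.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  int v = 0x01020304;
  unsigned char buf[sizeof v];
  unsigned i;

  memcpy (buf, &v, sizeof v);
  /* Little-endian: prints 04 03 02 01, least significant byte
     first, which is the layout the loop above emits.  */
  for (i = 0; i < sizeof v; i++)
    printf ("%02x ", buf[i]);
  putchar ('\n');
  return 0;
}
#endif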
7070 /* Subroutine of native_encode_expr. Encode the REAL_CST
7071 specified by EXPR into the buffer PTR of length LEN bytes.
7072 Return the number of bytes placed in the buffer, or zero
7076 native_encode_real (tree expr, unsigned char *ptr, int len)
7078 tree type = TREE_TYPE (expr);
7079 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7080 int byte, offset, word, words, bitpos;
7081 unsigned char value;
7083 /* There are always 32 bits in each long, no matter the size of
7084 the host's long. We handle floating point representations with
7088 if (total_bytes > len)
7090 words = 32 / UNITS_PER_WORD;
7092 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7094 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7095 bitpos += BITS_PER_UNIT)
7097 byte = (bitpos / BITS_PER_UNIT) & 3;
7098 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7100 if (UNITS_PER_WORD < 4)
7102 word = byte / UNITS_PER_WORD;
7103 if (WORDS_BIG_ENDIAN)
7104 word = (words - 1) - word;
7105 offset = word * UNITS_PER_WORD;
7106 if (BYTES_BIG_ENDIAN)
7107 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7109 offset += byte % UNITS_PER_WORD;
7112 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7113 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7118 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7119 specified by EXPR into the buffer PTR of length LEN bytes.
7120 Return the number of bytes placed in the buffer, or zero
7124 native_encode_complex (tree expr, unsigned char *ptr, int len)
7129 part = TREE_REALPART (expr);
7130 rsize = native_encode_expr (part, ptr, len);
7133 part = TREE_IMAGPART (expr);
7134 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7137 return rsize + isize;
7141 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7142 specified by EXPR into the buffer PTR of length LEN bytes.
7143 Return the number of bytes placed in the buffer, or zero
7147 native_encode_vector (tree expr, unsigned char *ptr, int len)
7149 int i, size, offset, count;
7150 tree itype, elem, elements;
7153 elements = TREE_VECTOR_CST_ELTS (expr);
7154 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7155 itype = TREE_TYPE (TREE_TYPE (expr));
7156 size = GET_MODE_SIZE (TYPE_MODE (itype));
7157 for (i = 0; i < count; i++)
7161 elem = TREE_VALUE (elements);
7162 elements = TREE_CHAIN (elements);
7169 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7174 if (offset + size > len)
7176 memset (ptr+offset, 0, size);
7184 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7185 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7186 buffer PTR of length LEN bytes. Return the number of bytes
7187 placed in the buffer, or zero upon failure. */
7190 native_encode_expr (tree expr, unsigned char *ptr, int len)
7192 switch (TREE_CODE (expr))
7195 return native_encode_int (expr, ptr, len);
7198 return native_encode_real (expr, ptr, len);
7201 return native_encode_complex (expr, ptr, len);
7204 return native_encode_vector (expr, ptr, len);
7212 /* Subroutine of native_interpret_expr. Interpret the contents of
7213 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7214 If the buffer cannot be interpreted, return NULL_TREE. */
7217 native_interpret_int (tree type, unsigned char *ptr, int len)
7219 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7220 int byte, offset, word, words;
7221 unsigned char value;
7222 unsigned HOST_WIDE_INT lo = 0;
7223 HOST_WIDE_INT hi = 0;
7225 if (total_bytes > len)
7227 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7229 words = total_bytes / UNITS_PER_WORD;
7231 for (byte = 0; byte < total_bytes; byte++)
7233 int bitpos = byte * BITS_PER_UNIT;
7234 if (total_bytes > UNITS_PER_WORD)
7236 word = byte / UNITS_PER_WORD;
7237 if (WORDS_BIG_ENDIAN)
7238 word = (words - 1) - word;
7239 offset = word * UNITS_PER_WORD;
7240 if (BYTES_BIG_ENDIAN)
7241 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7243 offset += byte % UNITS_PER_WORD;
7246 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7247 value = ptr[offset];
7249 if (bitpos < HOST_BITS_PER_WIDE_INT)
7250 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7252 hi |= (unsigned HOST_WIDE_INT) value
7253 << (bitpos - HOST_BITS_PER_WIDE_INT);
7256 return force_fit_type (build_int_cst_wide (type, lo, hi),
7261 /* Subroutine of native_interpret_expr. Interpret the contents of
7262 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7263 If the buffer cannot be interpreted, return NULL_TREE. */
7266 native_interpret_real (tree type, unsigned char *ptr, int len)
7268 enum machine_mode mode = TYPE_MODE (type);
7269 int total_bytes = GET_MODE_SIZE (mode);
7270 int byte, offset, word, words, bitpos;
7271 unsigned char value;
7272 /* There are always 32 bits in each long, no matter the size of
7273 the host's long. We handle floating point representations with
7278 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7279 if (total_bytes > len || total_bytes > 24)
7281 words = 32 / UNITS_PER_WORD;
7283 memset (tmp, 0, sizeof (tmp));
7284 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7285 bitpos += BITS_PER_UNIT)
7287 byte = (bitpos / BITS_PER_UNIT) & 3;
7288 if (UNITS_PER_WORD < 4)
7290 word = byte / UNITS_PER_WORD;
7291 if (WORDS_BIG_ENDIAN)
7292 word = (words - 1) - word;
7293 offset = word * UNITS_PER_WORD;
7294 if (BYTES_BIG_ENDIAN)
7295 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7297 offset += byte % UNITS_PER_WORD;
7300 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7301 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7303 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7306 real_from_target (&r, tmp, mode);
7307 return build_real (type, r);
7311 /* Subroutine of native_interpret_expr. Interpret the contents of
7312 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7313 If the buffer cannot be interpreted, return NULL_TREE. */
7316 native_interpret_complex (tree type, unsigned char *ptr, int len)
7318 tree etype, rpart, ipart;
7321 etype = TREE_TYPE (type);
7322 size = GET_MODE_SIZE (TYPE_MODE (etype));
7325 rpart = native_interpret_expr (etype, ptr, size);
7328 ipart = native_interpret_expr (etype, ptr+size, size);
7331 return build_complex (type, rpart, ipart);
7335 /* Subroutine of native_interpret_expr. Interpret the contents of
7336 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7337 If the buffer cannot be interpreted, return NULL_TREE. */
7340 native_interpret_vector (tree type, unsigned char *ptr, int len)
7342 tree etype, elem, elements;
7345 etype = TREE_TYPE (type);
7346 size = GET_MODE_SIZE (TYPE_MODE (etype));
7347 count = TYPE_VECTOR_SUBPARTS (type);
7348 if (size * count > len)
7351 elements = NULL_TREE;
7352 for (i = count - 1; i >= 0; i--)
7354 elem = native_interpret_expr (etype, ptr+(i*size), size);
7357 elements = tree_cons (NULL_TREE, elem, elements);
7359 return build_vector (type, elements);
7363 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7364 the buffer PTR of length LEN as a constant of type TYPE. For
7365 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7366 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7367 return NULL_TREE. */
7370 native_interpret_expr (tree type, unsigned char *ptr, int len)
7372 switch (TREE_CODE (type))
7377 return native_interpret_int (type, ptr, len);
7380 return native_interpret_real (type, ptr, len);
7383 return native_interpret_complex (type, ptr, len);
7386 return native_interpret_vector (type, ptr, len);
7394 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7395 TYPE at compile-time. If we're unable to perform the conversion
7396 return NULL_TREE. */
7399 fold_view_convert_expr (tree type, tree expr)
7401 /* We support up to 512-bit values (for V8DFmode). */
7402 unsigned char buffer[64];
7405 /* Check that the host and target are sane. */
7406 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7409 len = native_encode_expr (expr, buffer, sizeof (buffer));
7413 return native_interpret_expr (type, buffer, len);
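/* Illustrative sketch, for exposition only (hypothetical standalone
   program, fenced with #if 0): fold_view_convert_expr performs this
   bit reinterpretation at compile time via native_encode_expr and
   native_interpret_expr.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  unsigned int bits;

  /* Viewing the IEEE-754 single 1.0f as a 32-bit integer yields
     0x3f800000, exactly what a VIEW_CONVERT_EXPR of the constant
     would fold to.  */
  memcpy (&bits, &f, sizeof bits);
  printf ("0x%08x\n", bits);
  return 0;
}
#endif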
7417 /* Fold a unary expression of code CODE and type TYPE with operand
7418 OP0. Return the folded expression if folding is successful.
7419 Otherwise, return NULL_TREE. */
7422 fold_unary (enum tree_code code, tree type, tree op0)
7426 enum tree_code_class kind = TREE_CODE_CLASS (code);
7428 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7429 && TREE_CODE_LENGTH (code) == 1);
7434 if (code == NOP_EXPR || code == CONVERT_EXPR
7435 || code == FLOAT_EXPR || code == ABS_EXPR)
7437 /* Don't use STRIP_NOPS, because signedness of argument type
7439 STRIP_SIGN_NOPS (arg0);
7443 /* Strip any conversions that don't change the mode. This
7444 is safe for every expression, except for a comparison
7445 expression because its signedness is derived from its
7448 Note that this is done as an internal manipulation within
7449 the constant folder, in order to find the simplest
7450 representation of the arguments so that their form can be
7451 studied. In any case, the appropriate type conversions
7452 should be put back in the tree that will get out of the
7458 if (TREE_CODE_CLASS (code) == tcc_unary)
7460 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7461 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7462 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7463 else if (TREE_CODE (arg0) == COND_EXPR)
7465 tree arg01 = TREE_OPERAND (arg0, 1);
7466 tree arg02 = TREE_OPERAND (arg0, 2);
7467 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7468 arg01 = fold_build1 (code, type, arg01);
7469 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7470 arg02 = fold_build1 (code, type, arg02);
7471 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7474 /* If this was a conversion, and all we did was to move it
7475 inside the COND_EXPR, bring it back out. But leave it if
7476 it is a conversion from integer to integer and the
7477 result precision is no wider than a word since such a
7478 conversion is cheap and may be optimized away by combine,
7479 while it couldn't if it were outside the COND_EXPR. Then return
7480 so we don't get into an infinite recursion loop taking the
7481 conversion out and then back in. */
7483 if ((code == NOP_EXPR || code == CONVERT_EXPR
7484 || code == NON_LVALUE_EXPR)
7485 && TREE_CODE (tem) == COND_EXPR
7486 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7487 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7488 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7489 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7490 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7491 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7492 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7494 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7495 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7496 || flag_syntax_only))
7497 tem = build1 (code, type,
7499 TREE_TYPE (TREE_OPERAND
7500 (TREE_OPERAND (tem, 1), 0)),
7501 TREE_OPERAND (tem, 0),
7502 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7503 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7506 else if (COMPARISON_CLASS_P (arg0))
7508 if (TREE_CODE (type) == BOOLEAN_TYPE)
7510 arg0 = copy_node (arg0);
7511 TREE_TYPE (arg0) = type;
7514 else if (TREE_CODE (type) != INTEGER_TYPE)
7515 return fold_build3 (COND_EXPR, type, arg0,
7516 fold_build1 (code, type,
7518 fold_build1 (code, type,
7519 integer_zero_node));
7528 case FIX_TRUNC_EXPR:
7530 case FIX_FLOOR_EXPR:
7531 case FIX_ROUND_EXPR:
7532 if (TREE_TYPE (op0) == type)
7535 /* If we have (type) (a CMP b) and type is an integral type, return
7536 a new expression involving the new type. */
7537 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7538 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7539 TREE_OPERAND (op0, 1));
7541 /* Handle cases of two conversions in a row. */
7542 if (TREE_CODE (op0) == NOP_EXPR
7543 || TREE_CODE (op0) == CONVERT_EXPR)
7545 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7546 tree inter_type = TREE_TYPE (op0);
7547 int inside_int = INTEGRAL_TYPE_P (inside_type);
7548 int inside_ptr = POINTER_TYPE_P (inside_type);
7549 int inside_float = FLOAT_TYPE_P (inside_type);
7550 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7551 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7552 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7553 int inter_int = INTEGRAL_TYPE_P (inter_type);
7554 int inter_ptr = POINTER_TYPE_P (inter_type);
7555 int inter_float = FLOAT_TYPE_P (inter_type);
7556 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7557 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7558 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7559 int final_int = INTEGRAL_TYPE_P (type);
7560 int final_ptr = POINTER_TYPE_P (type);
7561 int final_float = FLOAT_TYPE_P (type);
7562 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7563 unsigned int final_prec = TYPE_PRECISION (type);
7564 int final_unsignedp = TYPE_UNSIGNED (type);
7566 /* In addition to the cases of two conversions in a row
7567 handled below, if we are converting something to its own
7568 type via an object of identical or wider precision, neither
7569 conversion is needed. */
7570 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7571 && (((inter_int || inter_ptr) && final_int)
7572 || (inter_float && final_float))
7573 && inter_prec >= final_prec)
7574 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7576 /* Likewise, if the intermediate and final types are either both
7577 float or both integer, we don't need the middle conversion if
7578 it is wider than the final type and doesn't change the signedness
7579 (for integers). Avoid this if the final type is a pointer
7580 since then we sometimes need the inner conversion. Likewise if
7581 the outer has a precision not equal to the size of its mode. */
7582 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7583 || (inter_float && inside_float)
7584 || (inter_vec && inside_vec))
7585 && inter_prec >= inside_prec
7586 && (inter_float || inter_vec
7587 || inter_unsignedp == inside_unsignedp)
7588 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7589 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7591 && (! final_vec || inter_prec == inside_prec))
7592 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7594 /* If we have a sign-extension of a zero-extended value, we can
7595 replace that by a single zero-extension. */
7596 if (inside_int && inter_int && final_int
7597 && inside_prec < inter_prec && inter_prec < final_prec
7598 && inside_unsignedp && !inter_unsignedp)
7599 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7601 /* Two conversions in a row are not needed unless:
7602 - some conversion is floating-point (overstrict for now), or
7603 - some conversion is a vector (overstrict for now), or
7604 - the intermediate type is narrower than both initial and
7606 - the intermediate type and innermost type differ in signedness,
7607 and the outermost type is wider than the intermediate, or
7608 - the initial type is a pointer type and the precisions of the
7609 intermediate and final types differ, or
7610 - the final type is a pointer type and the precisions of the
7611 initial and intermediate types differ, or
7612 - the final type is a pointer type and the initial type is not, or
7613 - the initial type is a pointer to an array and the final type
7615 /* Java pointer type conversions generate checks in some
7616 cases, so we explicitly disallow this optimization. */
7617 if (! inside_float && ! inter_float && ! final_float
7618 && ! inside_vec && ! inter_vec && ! final_vec
7619 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7620 && ! (inside_int && inter_int
7621 && inter_unsignedp != inside_unsignedp
7622 && inter_prec < final_prec)
7623 && ((inter_unsignedp && inter_prec > inside_prec)
7624 == (final_unsignedp && final_prec > inter_prec))
7625 && ! (inside_ptr && inter_prec != final_prec)
7626 && ! (final_ptr && inside_prec != inter_prec)
7627 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7628 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7629 && final_ptr == inside_ptr
7631 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7632 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7633 && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7635 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7638 /* Handle (T *)&A.B.C for A being of type T and B and C
7639 living at offset zero. This occurs frequently in
7640 C++ upcasting and then accessing the base. */
7641 if (TREE_CODE (op0) == ADDR_EXPR
7642 && POINTER_TYPE_P (type)
7643 && handled_component_p (TREE_OPERAND (op0, 0)))
7645 HOST_WIDE_INT bitsize, bitpos;
7647 enum machine_mode mode;
7648 int unsignedp, volatilep;
7649 tree base = TREE_OPERAND (op0, 0);
7650 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7651 &mode, &unsignedp, &volatilep, false);
7652 /* If the reference was to a (constant) zero offset, we can use
7653 the address of the base if it has the same base type
7654 as the result type. */
7655 if (! offset && bitpos == 0
7656 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7657 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7658 return fold_convert (type, build_fold_addr_expr (base));
7661 if (TREE_CODE (op0) == MODIFY_EXPR
7662 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7663 /* Detect assigning a bitfield. */
7664 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7665 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7667 /* Don't leave an assignment inside a conversion
7668 unless assigning a bitfield. */
7669 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7670 /* First do the assignment, then return converted constant. */
7671 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7672 TREE_NO_WARNING (tem) = 1;
7673 TREE_USED (tem) = 1;
7677 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7678 constant (if x has signed type, the sign bit cannot be set
7679 in c). This folds extension into the BIT_AND_EXPR. */
7680 if (INTEGRAL_TYPE_P (type)
7681 && TREE_CODE (type) != BOOLEAN_TYPE
7682 && TREE_CODE (op0) == BIT_AND_EXPR
7683 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7686 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7689 if (TYPE_UNSIGNED (TREE_TYPE (and))
7690 || (TYPE_PRECISION (type)
7691 <= TYPE_PRECISION (TREE_TYPE (and))))
7693 else if (TYPE_PRECISION (TREE_TYPE (and1))
7694 <= HOST_BITS_PER_WIDE_INT
7695 && host_integerp (and1, 1))
7697 unsigned HOST_WIDE_INT cst;
7699 cst = tree_low_cst (and1, 1);
7700 cst &= (HOST_WIDE_INT) -1
7701 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7702 change = (cst == 0);
7703 #ifdef LOAD_EXTEND_OP
7705 && !flag_syntax_only
7706 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7709 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7710 and0 = fold_convert (uns, and0);
7711 and1 = fold_convert (uns, and1);
7717 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7718 TREE_INT_CST_HIGH (and1));
7719 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7720 TREE_CONSTANT_OVERFLOW (and1));
7721 return fold_build2 (BIT_AND_EXPR, type,
7722 fold_convert (type, and0), tem);
7726 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7727 T2 being pointers to types of the same size. */
7728 if (POINTER_TYPE_P (type)
7729 && BINARY_CLASS_P (arg0)
7730 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7731 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7733 tree arg00 = TREE_OPERAND (arg0, 0);
7735 tree t1 = TREE_TYPE (arg00);
7736 tree tt0 = TREE_TYPE (t0);
7737 tree tt1 = TREE_TYPE (t1);
7738 tree s0 = TYPE_SIZE (tt0);
7739 tree s1 = TYPE_SIZE (tt1);
7741 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7742 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7743 TREE_OPERAND (arg0, 1));
7746 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7747 of the same precision, and X is an integer type not narrower than
7748 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7749 if (INTEGRAL_TYPE_P (type)
7750 && TREE_CODE (op0) == BIT_NOT_EXPR
7751 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7752 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7753 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7754 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7756 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7757 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7758 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7759 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7762 tem = fold_convert_const (code, type, op0);
7763 return tem ? tem : NULL_TREE;
7765 case VIEW_CONVERT_EXPR:
7766 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7767 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7768 return fold_view_convert_expr (type, op0);
7771 tem = fold_negate_expr (arg0);
7773 return fold_convert (type, tem);
7777 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7778 return fold_abs_const (arg0, type);
7779 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7780 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7781 /* Convert fabs((double)float) into (double)fabsf(float). */
7782 else if (TREE_CODE (arg0) == NOP_EXPR
7783 && TREE_CODE (type) == REAL_TYPE)
7785 tree targ0 = strip_float_extensions (arg0);
7787 return fold_convert (type, fold_build1 (ABS_EXPR,
7791 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7792 else if (TREE_CODE (arg0) == ABS_EXPR)
7794 else if (tree_expr_nonnegative_p (arg0))
7797 /* Strip sign ops from argument. */
7798 if (TREE_CODE (type) == REAL_TYPE)
7800 tem = fold_strip_sign_ops (arg0);
7802 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7807 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7808 return fold_convert (type, arg0);
7809 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7811 tree itype = TREE_TYPE (type);
7812 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7813 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7814 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7816 if (TREE_CODE (arg0) == COMPLEX_CST)
7818 tree itype = TREE_TYPE (type);
7819 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7820 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7821 return build_complex (type, rpart, negate_expr (ipart));
7823 if (TREE_CODE (arg0) == CONJ_EXPR)
7824 return fold_convert (type, TREE_OPERAND (arg0, 0));
7828 if (TREE_CODE (arg0) == INTEGER_CST)
7829 return fold_not_const (arg0, type);
7830 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7831 return TREE_OPERAND (arg0, 0);
7832 /* Convert ~ (-A) to A - 1. */
7833 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7834 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7835 build_int_cst (type, 1));
7836 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7837 else if (INTEGRAL_TYPE_P (type)
7838 && ((TREE_CODE (arg0) == MINUS_EXPR
7839 && integer_onep (TREE_OPERAND (arg0, 1)))
7840 || (TREE_CODE (arg0) == PLUS_EXPR
7841 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7842 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7843 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7844 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7845 && (tem = fold_unary (BIT_NOT_EXPR, type,
7847 TREE_OPERAND (arg0, 0)))))
7848 return fold_build2 (BIT_XOR_EXPR, type, tem,
7849 fold_convert (type, TREE_OPERAND (arg0, 1)));
7850 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7851 && (tem = fold_unary (BIT_NOT_EXPR, type,
7853 TREE_OPERAND (arg0, 1)))))
7854 return fold_build2 (BIT_XOR_EXPR, type,
7855 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7859 case TRUTH_NOT_EXPR:
7860 /* The argument to invert_truthvalue must have Boolean type. */
7861 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7862 arg0 = fold_convert (boolean_type_node, arg0);
7864 /* Note that the operand of this must be an int
7865 and its values must be 0 or 1.
7866 ("true" is a fixed value perhaps depending on the language,
7867 but we don't handle values other than 1 correctly yet.) */
7868 tem = fold_truth_not_expr (arg0);
7871 return fold_convert (type, tem);
7874 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7875 return fold_convert (type, arg0);
7876 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7877 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7878 TREE_OPERAND (arg0, 1));
7879 if (TREE_CODE (arg0) == COMPLEX_CST)
7880 return fold_convert (type, TREE_REALPART (arg0));
7881 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7883 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7884 tem = fold_build2 (TREE_CODE (arg0), itype,
7885 fold_build1 (REALPART_EXPR, itype,
7886 TREE_OPERAND (arg0, 0)),
7887 fold_build1 (REALPART_EXPR, itype,
7888 TREE_OPERAND (arg0, 1)));
7889 return fold_convert (type, tem);
7891 if (TREE_CODE (arg0) == CONJ_EXPR)
7893 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7894 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7895 return fold_convert (type, tem);
7900 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7901 return fold_convert (type, integer_zero_node);
7902 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7903 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7904 TREE_OPERAND (arg0, 0));
7905 if (TREE_CODE (arg0) == COMPLEX_CST)
7906 return fold_convert (type, TREE_IMAGPART (arg0));
7907 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7909 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7910 tem = fold_build2 (TREE_CODE (arg0), itype,
7911 fold_build1 (IMAGPART_EXPR, itype,
7912 TREE_OPERAND (arg0, 0)),
7913 fold_build1 (IMAGPART_EXPR, itype,
7914 TREE_OPERAND (arg0, 1)));
7915 return fold_convert (type, tem);
7917 if (TREE_CODE (arg0) == CONJ_EXPR)
7919 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7920 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7921 return fold_convert (type, negate_expr (tem));
7927 } /* switch (code) */
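/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): a few of the BIT_NOT_EXPR folds above, seen from C.  */
#if 0
static int
unary_demo (int a, int x, int y)
{
  int t1 = ~-a;        /* ~(-A) -> A - 1 */
  int t2 = ~(a - 1);   /* ~(A - 1) -> -A */
  int t3 = ~(x ^ ~y);  /* ~(X ^ Y') with ~Y' simplifying -> x ^ y */
  return t1 + t2 + t3;
}
#endif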
7930 /* Fold a binary expression of code CODE and type TYPE with operands
7931 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7932 Return the folded expression if folding is successful. Otherwise,
7933 return NULL_TREE. */
7936 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7938 enum tree_code compl_code;
7940 if (code == MIN_EXPR)
7941 compl_code = MAX_EXPR;
7942 else if (code == MAX_EXPR)
7943 compl_code = MIN_EXPR;
7947 /* MIN (MAX (a, b), b) == b. */
7948 if (TREE_CODE (op0) == compl_code
7949 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7950 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7952 /* MIN (MAX (b, a), b) == b. */
7953 if (TREE_CODE (op0) == compl_code
7954 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7955 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7956 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7958 /* MIN (a, MAX (a, b)) == a. */
7959 if (TREE_CODE (op1) == compl_code
7960 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7961 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7962 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7964 /* MIN (a, MAX (b, a)) == a. */
7965 if (TREE_CODE (op1) == compl_code
7966 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7967 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7968 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
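/* Illustrative sketch, for exposition only (hypothetical, fenced with
   #if 0): the MIN/MAX identities above, spelled with the usual
   conditional-expression macros.  */
#if 0
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define MAX(a, b) ((a) > (b) ? (a) : (b))

static int
minmax_demo (int a, int b)
{
  /* Once recognized as MIN_EXPR/MAX_EXPR trees, MIN (MAX (a, b), b)
     folds straight to b.  */
  return MIN (MAX (a, b), b);
}
#endif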
7973 /* Subroutine of fold_binary. This routine performs all of the
7974 transformations that are common to the equality/inequality
7975 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7976 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7977 fold_binary should use fold_binary rather than this function. Fold a comparison with
7978 tree code CODE and type TYPE with operands OP0 and OP1. Return
7979 the folded comparison or NULL_TREE. */
7982 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7984 tree arg0, arg1, tem;
7989 STRIP_SIGN_NOPS (arg0);
7990 STRIP_SIGN_NOPS (arg1);
7992 tem = fold_relational_const (code, type, arg0, arg1);
7993 if (tem != NULL_TREE)
7996 /* If one arg is a real or integer constant, put it last. */
7997 if (tree_swap_operands_p (arg0, arg1, true))
7998 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8000 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8001 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8002 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8003 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8004 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8005 && (TREE_CODE (arg1) == INTEGER_CST
8006 && !TREE_OVERFLOW (arg1)))
8008 tree const1 = TREE_OPERAND (arg0, 1);
8010 tree variable = TREE_OPERAND (arg0, 0);
8013 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8015 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8016 TREE_TYPE (arg1), const2, const1);
8017 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8018 && (TREE_CODE (lhs) != INTEGER_CST
8019 || !TREE_OVERFLOW (lhs)))
8021 fold_overflow_warning (("assuming signed overflow does not occur "
8022 "when changing X +- C1 cmp C2 to "
8024 WARN_STRICT_OVERFLOW_COMPARISON);
8025 return fold_build2 (code, type, variable, lhs);
8029 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8030 same object, then we can fold this to a comparison of the two offsets in
8031 signed size type. This is possible because pointer arithmetic is
8032 restricted to remain within an object and overflow on pointer differences
8033 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8035 We check flag_wrapv directly because pointer types are unsigned,
8036 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8037 normally what we want, to avoid certain odd overflow cases, but
8039 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8041 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8043 tree base0, offset0, base1, offset1;
8045 if (extract_array_ref (arg0, &base0, &offset0)
8046 && extract_array_ref (arg1, &base1, &offset1)
8047 && operand_equal_p (base0, base1, 0))
8049 tree signed_size_type_node;
8050 signed_size_type_node = signed_type_for (size_type_node);
8052 /* By converting to signed size type we cover middle-end pointer
8053 arithmetic which operates on unsigned pointer types of size
8054 type size and ARRAY_REF offsets which are properly sign or
8055 zero extended from their type in case it is narrower than
8057 if (offset0 == NULL_TREE)
8058 offset0 = build_int_cst (signed_size_type_node, 0);
8060 offset0 = fold_convert (signed_size_type_node, offset0);
8061 if (offset1 == NULL_TREE)
8062 offset1 = build_int_cst (signed_size_type_node, 0);
8064 offset1 = fold_convert (signed_size_type_node, offset1);
8066 return fold_build2 (code, type, offset0, offset1);
8070 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8072 tree targ0 = strip_float_extensions (arg0);
8073 tree targ1 = strip_float_extensions (arg1);
8074 tree newtype = TREE_TYPE (targ0);
8076 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8077 newtype = TREE_TYPE (targ1);
8079 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8080 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8081 return fold_build2 (code, type, fold_convert (newtype, targ0),
8082 fold_convert (newtype, targ1));
8084 /* (-a) CMP (-b) -> b CMP a */
8085 if (TREE_CODE (arg0) == NEGATE_EXPR
8086 && TREE_CODE (arg1) == NEGATE_EXPR)
8087 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8088 TREE_OPERAND (arg0, 0));
8090 if (TREE_CODE (arg1) == REAL_CST)
8092 REAL_VALUE_TYPE cst;
8093 cst = TREE_REAL_CST (arg1);
8095 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8096 if (TREE_CODE (arg0) == NEGATE_EXPR)
8097 return fold_build2 (swap_tree_comparison (code), type,
8098 TREE_OPERAND (arg0, 0),
8099 build_real (TREE_TYPE (arg1),
8100 REAL_VALUE_NEGATE (cst)));
8102 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8103 /* a CMP (-0) -> a CMP 0 */
8104 if (REAL_VALUE_MINUS_ZERO (cst))
8105 return fold_build2 (code, type, arg0,
8106 build_real (TREE_TYPE (arg1), dconst0));
8108 /* x != NaN is always true, other ops are always false. */
8109 if (REAL_VALUE_ISNAN (cst)
8110 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8112 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8113 return omit_one_operand (type, tem, arg0);
8116 /* Fold comparisons against infinity. */
8117 if (REAL_VALUE_ISINF (cst))
8119 tem = fold_inf_compare (code, type, arg0, arg1);
8120 if (tem != NULL_TREE)
8125 /* If this is a comparison of a real constant with a PLUS_EXPR
8126 or a MINUS_EXPR of a real constant, we can convert it into a
8127 comparison with a revised real constant as long as no overflow
8128 occurs when unsafe_math_optimizations are enabled. */
8129 if (flag_unsafe_math_optimizations
8130 && TREE_CODE (arg1) == REAL_CST
8131 && (TREE_CODE (arg0) == PLUS_EXPR
8132 || TREE_CODE (arg0) == MINUS_EXPR)
8133 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8134 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8135 ? MINUS_EXPR : PLUS_EXPR,
8136 arg1, TREE_OPERAND (arg0, 1), 0))
8137 && ! TREE_CONSTANT_OVERFLOW (tem))
8138 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8140 /* Likewise, we can simplify a comparison of a real constant with
8141 a MINUS_EXPR whose first operand is also a real constant, i.e.
8142 (c1 - x) < c2 becomes x > c1-c2. */
8143 if (flag_unsafe_math_optimizations
8144 && TREE_CODE (arg1) == REAL_CST
8145 && TREE_CODE (arg0) == MINUS_EXPR
8146 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8147 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8149 && ! TREE_CONSTANT_OVERFLOW (tem))
8150 return fold_build2 (swap_tree_comparison (code), type,
8151 TREE_OPERAND (arg0, 1), tem);
8153 /* Fold comparisons against built-in math functions. */
8154 if (TREE_CODE (arg1) == REAL_CST
8155 && flag_unsafe_math_optimizations
8156 && ! flag_errno_math)
8158 enum built_in_function fcode = builtin_mathfn_code (arg0);
8160 if (fcode != END_BUILTINS)
8162 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8163 if (tem != NULL_TREE)
8169 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8170 if (TREE_CONSTANT (arg1)
8171 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8172 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8173 /* This optimization is invalid for ordered comparisons
8174 if CONST+INCR overflows or if foo+incr might overflow.
8175 This optimization is invalid for floating point due to rounding.
8176 For pointer types we assume overflow doesn't happen. */
8177 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8178 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8179 && (code == EQ_EXPR || code == NE_EXPR))))
8181 tree varop, newconst;
8183 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8185 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8186 arg1, TREE_OPERAND (arg0, 1));
8187 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8188 TREE_OPERAND (arg0, 0),
8189 TREE_OPERAND (arg0, 1));
8193 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8194 arg1, TREE_OPERAND (arg0, 1));
8195 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8196 TREE_OPERAND (arg0, 0),
8197 TREE_OPERAND (arg0, 1));
8201 /* If VAROP is a reference to a bitfield, we must mask
8202 the constant by the width of the field. */
8203 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8204 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8205 && host_integerp (DECL_SIZE (TREE_OPERAND
8206 (TREE_OPERAND (varop, 0), 1)), 1))
8208 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8209 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8210 tree folded_compare, shift;
8212 /* First check whether the comparison would come out
8213 always the same. If we don't do that we would
8214 change the meaning with the masking. */
8215 folded_compare = fold_build2 (code, type,
8216 TREE_OPERAND (varop, 0), arg1);
8217 if (TREE_CODE (folded_compare) == INTEGER_CST)
8218 return omit_one_operand (type, folded_compare, varop);
8220 shift = build_int_cst (NULL_TREE,
8221 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8222 shift = fold_convert (TREE_TYPE (varop), shift);
8223 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8224 newconst, shift);
8225 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8226 newconst, shift);
8227 }
8229 return fold_build2 (code, type, varop, newconst);
8230 }
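/* Illustrative sketch (not from the original sources): for an
   integral equality test this transformation turns

     if (i++ == 5) ...

   into the equivalent  if (++i == 6) ... , folding the increment
   into the constant.  Ordered comparisons are excluded because
   CONST+INCR could overflow.  */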
8232 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8233 && (TREE_CODE (arg0) == NOP_EXPR
8234 || TREE_CODE (arg0) == CONVERT_EXPR))
8235 {
8236 /* If we are widening one operand of an integer comparison,
8237 see if the other operand is similarly being widened. Perhaps we
8238 can do the comparison in the narrower type. */
8239 tem = fold_widened_comparison (code, type, arg0, arg1);
8240 if (tem)
8241 return tem;
8243 /* Or if we are changing signedness. */
8244 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8245 if (tem)
8246 return tem;
8247 }
8249 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8250 constant, we can simplify it. */
8251 if (TREE_CODE (arg1) == INTEGER_CST
8252 && (TREE_CODE (arg0) == MIN_EXPR
8253 || TREE_CODE (arg0) == MAX_EXPR)
8254 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8255 {
8256 tem = optimize_minmax_comparison (code, type, op0, op1);
8257 if (tem)
8258 return tem;
8259 }
8261 /* Simplify comparison of something with itself. (For IEEE
8262 floating-point, we can only do some of these simplifications.) */
8263 if (operand_equal_p (arg0, arg1, 0))
8264 {
8265 switch (code)
8266 {
8267 case EQ_EXPR:
8268 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8269 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8270 return constant_boolean_node (1, type);
8271 break;
8273 case GE_EXPR:
8274 case LE_EXPR:
8275 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8276 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8277 return constant_boolean_node (1, type);
8278 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8280 case NE_EXPR:
8281 /* For NE, we can only do this simplification if integer
8282 or we don't honor IEEE floating point NaNs. */
8283 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8284 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8285 break;
8286 /* ... fall through ... */
8287 case GT_EXPR:
8288 case LT_EXPR:
8289 return constant_boolean_node (0, type);
8290 default:
8291 gcc_unreachable ();
8292 }
8293 }
8295 /* If we are comparing an expression that just has comparisons
8296 of two integer values, arithmetic expressions of those comparisons,
8297 and constants, we can simplify it. There are only three cases
8298 to check: the two values can either be equal, the first can be
8299 greater, or the second can be greater. Fold the expression for
8300 those three values. Since each value must be 0 or 1, we have
8301 eight possibilities, each of which corresponds to the constant 0
8302 or 1 or one of the six possible comparisons.
8304 This handles common cases like (a > b) == 0 but also handles
8305 expressions like ((x > y) - (y > x)) > 0, which supposedly
8306 occur in macroized code. */
8308 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8309 {
8310 tree cval1 = 0, cval2 = 0;
8311 int save_p = 0;
8313 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8314 /* Don't handle degenerate cases here; they should already
8315 have been handled anyway. */
8316 && cval1 != 0 && cval2 != 0
8317 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8318 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8319 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8320 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8321 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8322 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8323 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8324 {
8325 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8326 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8328 /* We can't just pass T to eval_subst in case cval1 or cval2
8329 was the same as ARG1. */
8331 tree high_result
8332 = fold_build2 (code, type,
8333 eval_subst (arg0, cval1, maxval,
8334 cval2, minval),
8335 arg1);
8336 tree equal_result
8337 = fold_build2 (code, type,
8338 eval_subst (arg0, cval1, maxval,
8339 cval2, maxval),
8340 arg1);
8341 tree low_result
8342 = fold_build2 (code, type,
8343 eval_subst (arg0, cval1, minval,
8344 cval2, maxval),
8345 arg1);
8347 /* All three of these results should be 0 or 1. Confirm they are.
8348 Then use those values to select the proper code to use. */
8350 if (TREE_CODE (high_result) == INTEGER_CST
8351 && TREE_CODE (equal_result) == INTEGER_CST
8352 && TREE_CODE (low_result) == INTEGER_CST)
8353 {
8354 /* Make a 3-bit mask with the high-order bit being the
8355 value for `>', the next for '=', and the low for '<'. */
8356 switch ((integer_onep (high_result) * 4)
8357 + (integer_onep (equal_result) * 2)
8358 + integer_onep (low_result))
8359 {
8360 case 0:
8361 /* Always false. */
8362 return omit_one_operand (type, integer_zero_node, arg0);
8363 case 1:
8364 code = LT_EXPR;
8365 break;
8366 case 2:
8367 code = EQ_EXPR;
8368 break;
8369 case 3:
8370 code = LE_EXPR;
8371 break;
8372 case 4:
8373 code = GT_EXPR;
8374 break;
8375 case 5:
8376 code = NE_EXPR;
8377 break;
8378 case 6:
8379 code = GE_EXPR;
8380 break;
8381 case 7:
8382 /* Always true. */
8383 return omit_one_operand (type, integer_one_node, arg0);
8384 }
8386 if (save_p)
8387 return save_expr (build2 (code, type, cval1, cval2));
8388 return fold_build2 (code, type, cval1, cval2);
8389 }
8390 }
8391 }
8393 /* Fold a comparison of the address of COMPONENT_REFs with the same
8394 type and component to a comparison of the address of the base
8395 object. In short, &x->a OP &y->a to x OP y and
8396 &x->a OP &y.a to x OP &y */
8397 if (TREE_CODE (arg0) == ADDR_EXPR
8398 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8399 && TREE_CODE (arg1) == ADDR_EXPR
8400 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8401 {
8402 tree cref0 = TREE_OPERAND (arg0, 0);
8403 tree cref1 = TREE_OPERAND (arg1, 0);
8404 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8405 {
8406 tree op0 = TREE_OPERAND (cref0, 0);
8407 tree op1 = TREE_OPERAND (cref1, 0);
8408 return fold_build2 (code, type,
8409 build_fold_addr_expr (op0),
8410 build_fold_addr_expr (op1));
8411 }
8412 }
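/* Illustrative sketch (not from the original sources): given

     struct s { int a; } *x, *y;

   a test like  &x->a == &y->a  reduces to  x == y , since both
   addresses name the same component of their base objects.  */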
8414 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8415 into a single range test. */
8416 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8417 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8418 && TREE_CODE (arg1) == INTEGER_CST
8419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8420 && !integer_zerop (TREE_OPERAND (arg0, 1))
8421 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8422 && !TREE_OVERFLOW (arg1))
8423 {
8424 tem = fold_div_compare (code, type, arg0, arg1);
8425 if (tem != NULL_TREE)
8426 return tem;
8427 }
8429 return NULL_TREE;
8430 }
8433 /* Subroutine of fold_binary. Optimize complex multiplications of the
8434 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8435 argument EXPR represents the expression "z" of type TYPE. */
8437 static tree
8438 fold_mult_zconjz (tree type, tree expr)
8439 {
8440 tree itype = TREE_TYPE (type);
8441 tree rpart, ipart, tem;
8443 if (TREE_CODE (expr) == COMPLEX_EXPR)
8444 {
8445 rpart = TREE_OPERAND (expr, 0);
8446 ipart = TREE_OPERAND (expr, 1);
8447 }
8448 else if (TREE_CODE (expr) == COMPLEX_CST)
8449 {
8450 rpart = TREE_REALPART (expr);
8451 ipart = TREE_IMAGPART (expr);
8452 }
8453 else
8454 {
8455 expr = save_expr (expr);
8456 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8457 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8458 }
8460 rpart = save_expr (rpart);
8461 ipart = save_expr (ipart);
8462 tem = fold_build2 (PLUS_EXPR, itype,
8463 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8464 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8465 return fold_build2 (COMPLEX_EXPR, type, tem,
8466 fold_convert (itype, integer_zero_node));
8467 }
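/* Illustrative sketch (not from the original sources): for
   _Complex int z, the product  z * ~z  (GNU C conjugation) becomes

     (realpart (z) * realpart (z) + imagpart (z) * imagpart (z)) + 0i

   which is exactly the COMPLEX_EXPR built above.  */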
8470 /* Fold a binary expression of code CODE and type TYPE with operands
8471 OP0 and OP1. Return the folded expression if folding is
8472 successful. Otherwise, return NULL_TREE. */
8474 tree
8475 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8476 {
8477 enum tree_code_class kind = TREE_CODE_CLASS (code);
8478 tree arg0, arg1, tem;
8479 tree t1 = NULL_TREE;
8480 bool strict_overflow_p;
8482 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8483 && TREE_CODE_LENGTH (code) == 2
8484 && op0 != NULL_TREE
8485 && op1 != NULL_TREE);
8487 arg0 = op0;
8488 arg1 = op1;
8490 /* Strip any conversions that don't change the mode. This is
8491 safe for every expression, except for a comparison expression
8492 because its signedness is derived from its operands. So, in
8493 the latter case, only strip conversions that don't change the
8494 signedness.
8497 constant folder, in order to find the simplest representation
8498 of the arguments so that their form can be studied. In any
8499 case, the appropriate type conversions should be put back in
8500 the tree that will get out of the constant folder. */
8502 if (kind == tcc_comparison)
8503 {
8504 STRIP_SIGN_NOPS (arg0);
8505 STRIP_SIGN_NOPS (arg1);
8506 }
8507 else
8508 {
8509 STRIP_NOPS (arg0);
8510 STRIP_NOPS (arg1);
8511 }
8513 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8514 constant but we can't do arithmetic on them. */
8515 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8516 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8517 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8518 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8519 {
8520 if (kind == tcc_binary)
8521 tem = const_binop (code, arg0, arg1, 0);
8522 else if (kind == tcc_comparison)
8523 tem = fold_relational_const (code, type, arg0, arg1);
8524 else
8525 tem = NULL_TREE;
8527 if (tem != NULL_TREE)
8528 {
8529 if (TREE_TYPE (tem) != type)
8530 tem = fold_convert (type, tem);
8531 return tem;
8532 }
8533 }
8535 /* If this is a commutative operation, and ARG0 is a constant, move it
8536 to ARG1 to reduce the number of tests below. */
8537 if (commutative_tree_code (code)
8538 && tree_swap_operands_p (arg0, arg1, true))
8539 return fold_build2 (code, type, op1, op0);
8541 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8543 First check for cases where an arithmetic operation is applied to a
8544 compound, conditional, or comparison operation. Push the arithmetic
8545 operation inside the compound or conditional to see if any folding
8546 can then be done. Convert comparison to conditional for this purpose.
8547 This also optimizes non-constant cases that used to be done in
8548 expr.c.
8550 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8551 one of the operands is a comparison and the other is a comparison, a
8552 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8553 code below would make the expression more complex. Change it to a
8554 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8555 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8557 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8558 || code == EQ_EXPR || code == NE_EXPR)
8559 && ((truth_value_p (TREE_CODE (arg0))
8560 && (truth_value_p (TREE_CODE (arg1))
8561 || (TREE_CODE (arg1) == BIT_AND_EXPR
8562 && integer_onep (TREE_OPERAND (arg1, 1)))))
8563 || (truth_value_p (TREE_CODE (arg1))
8564 && (truth_value_p (TREE_CODE (arg0))
8565 || (TREE_CODE (arg0) == BIT_AND_EXPR
8566 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8568 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8569 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8570 : TRUTH_XOR_EXPR,
8571 boolean_type_node,
8572 fold_convert (boolean_type_node, arg0),
8573 fold_convert (boolean_type_node, arg1));
8575 if (code == EQ_EXPR)
8576 tem = invert_truthvalue (tem);
8578 return fold_convert (type, tem);
8579 }
8581 if (TREE_CODE_CLASS (code) == tcc_binary
8582 || TREE_CODE_CLASS (code) == tcc_comparison)
8584 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8585 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8586 fold_build2 (code, type,
8587 TREE_OPERAND (arg0, 1), op1));
8588 if (TREE_CODE (arg1) == COMPOUND_EXPR
8589 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8590 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8591 fold_build2 (code, type,
8592 op0, TREE_OPERAND (arg1, 1)));
8594 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8595 {
8596 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8597 arg0, arg1,
8598 /*cond_first_p=*/1);
8599 if (tem != NULL_TREE)
8600 return tem;
8601 }
8603 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8604 {
8605 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8606 arg1, arg0,
8607 /*cond_first_p=*/0);
8608 if (tem != NULL_TREE)
8609 return tem;
8610 }
8611 }
8613 switch (code)
8614 {
8615 case PLUS_EXPR:
8616 /* A + (-B) -> A - B */
8617 if (TREE_CODE (arg1) == NEGATE_EXPR)
8618 return fold_build2 (MINUS_EXPR, type,
8619 fold_convert (type, arg0),
8620 fold_convert (type, TREE_OPERAND (arg1, 0)));
8621 /* (-A) + B -> B - A */
8622 if (TREE_CODE (arg0) == NEGATE_EXPR
8623 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8624 return fold_build2 (MINUS_EXPR, type,
8625 fold_convert (type, arg1),
8626 fold_convert (type, TREE_OPERAND (arg0, 0)));
8627 /* Convert ~A + 1 to -A. */
8628 if (INTEGRAL_TYPE_P (type)
8629 && TREE_CODE (arg0) == BIT_NOT_EXPR
8630 && integer_onep (arg1))
8631 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
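/* Illustrative sketch (not from the original sources): for int a,
   the expression  ~a + 1  folds to  -a , the usual two's-complement
   negation identity.  */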
8633 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8634 same or one. */
8635 if ((TREE_CODE (arg0) == MULT_EXPR
8636 || TREE_CODE (arg1) == MULT_EXPR)
8637 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8638 {
8639 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8640 if (tem)
8641 return tem;
8642 }
8644 if (! FLOAT_TYPE_P (type))
8646 if (integer_zerop (arg1))
8647 return non_lvalue (fold_convert (type, arg0));
8649 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8650 with a constant, and the two constants have no bits in common,
8651 we should treat this as a BIT_IOR_EXPR since this may produce more
8652 simplifications. */
8653 if (TREE_CODE (arg0) == BIT_AND_EXPR
8654 && TREE_CODE (arg1) == BIT_AND_EXPR
8655 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8656 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8657 && integer_zerop (const_binop (BIT_AND_EXPR,
8658 TREE_OPERAND (arg0, 1),
8659 TREE_OPERAND (arg1, 1), 0)))
8660 {
8661 code = BIT_IOR_EXPR;
8662 goto bit_ior;
8663 }
8665 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8666 (plus (plus (mult) (mult)) (foo)) so that we can
8667 take advantage of the factoring cases below. */
8668 if (((TREE_CODE (arg0) == PLUS_EXPR
8669 || TREE_CODE (arg0) == MINUS_EXPR)
8670 && TREE_CODE (arg1) == MULT_EXPR)
8671 || ((TREE_CODE (arg1) == PLUS_EXPR
8672 || TREE_CODE (arg1) == MINUS_EXPR)
8673 && TREE_CODE (arg0) == MULT_EXPR))
8675 tree parg0, parg1, parg, marg;
8676 enum tree_code pcode;
8678 if (TREE_CODE (arg1) == MULT_EXPR)
8679 parg = arg0, marg = arg1;
8680 else
8681 parg = arg1, marg = arg0;
8682 pcode = TREE_CODE (parg);
8683 parg0 = TREE_OPERAND (parg, 0);
8684 parg1 = TREE_OPERAND (parg, 1);
8685 STRIP_NOPS (parg0);
8686 STRIP_NOPS (parg1);
8688 if (TREE_CODE (parg0) == MULT_EXPR
8689 && TREE_CODE (parg1) != MULT_EXPR)
8690 return fold_build2 (pcode, type,
8691 fold_build2 (PLUS_EXPR, type,
8692 fold_convert (type, parg0),
8693 fold_convert (type, marg)),
8694 fold_convert (type, parg1));
8695 if (TREE_CODE (parg0) != MULT_EXPR
8696 && TREE_CODE (parg1) == MULT_EXPR)
8697 return fold_build2 (PLUS_EXPR, type,
8698 fold_convert (type, parg0),
8699 fold_build2 (pcode, type,
8700 fold_convert (type, marg),
8701 fold_convert (type, parg1)));
8702 }
8705 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8706 of the array. The loop optimizer sometimes produces this type of
8707 expression. */
8708 if (TREE_CODE (arg0) == ADDR_EXPR)
8709 {
8710 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8711 if (tem)
8712 return fold_convert (type, tem);
8713 }
8714 else if (TREE_CODE (arg1) == ADDR_EXPR)
8715 {
8716 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8717 if (tem)
8718 return fold_convert (type, tem);
8719 }
8720 }
8721 else
8722 {
8723 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8724 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8725 return non_lvalue (fold_convert (type, arg0));
8727 /* Likewise if the operands are reversed. */
8728 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8729 return non_lvalue (fold_convert (type, arg1));
8731 /* Convert X + -C into X - C. */
8732 if (TREE_CODE (arg1) == REAL_CST
8733 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8735 tem = fold_negate_const (arg1, type);
8736 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8737 return fold_build2 (MINUS_EXPR, type,
8738 fold_convert (type, arg0),
8739 fold_convert (type, tem));
8742 if (flag_unsafe_math_optimizations
8743 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8744 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8745 && (tem = distribute_real_division (code, type, arg0, arg1)))
8746 return tem;
8748 /* Convert x+x into x*2.0. */
8749 if (operand_equal_p (arg0, arg1, 0)
8750 && SCALAR_FLOAT_TYPE_P (type))
8751 return fold_build2 (MULT_EXPR, type, arg0,
8752 build_real (type, dconst2));
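/* Illustrative sketch (not from the original sources): for double x,
   x + x  becomes  x * 2.0 ; multiplication by 2.0 is exact in binary
   floating point, so no -funsafe-math-optimizations is needed here.  */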
8754 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8755 if (flag_unsafe_math_optimizations
8756 && TREE_CODE (arg1) == PLUS_EXPR
8757 && TREE_CODE (arg0) != MULT_EXPR)
8759 tree tree10 = TREE_OPERAND (arg1, 0);
8760 tree tree11 = TREE_OPERAND (arg1, 1);
8761 if (TREE_CODE (tree11) == MULT_EXPR
8762 && TREE_CODE (tree10) == MULT_EXPR)
8763 {
8764 tree tree0;
8765 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8766 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8767 }
8768 }
8769 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8770 if (flag_unsafe_math_optimizations
8771 && TREE_CODE (arg0) == PLUS_EXPR
8772 && TREE_CODE (arg1) != MULT_EXPR)
8774 tree tree00 = TREE_OPERAND (arg0, 0);
8775 tree tree01 = TREE_OPERAND (arg0, 1);
8776 if (TREE_CODE (tree01) == MULT_EXPR
8777 && TREE_CODE (tree00) == MULT_EXPR)
8778 {
8779 tree tree0;
8780 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8781 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8782 }
8783 }
8785 bit_rotate:
8787 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8788 is a rotate of A by C1 bits. */
8789 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8790 is a rotate of A by B bits. */
8791 {
8792 enum tree_code code0, code1;
8793 code0 = TREE_CODE (arg0);
8794 code1 = TREE_CODE (arg1);
8795 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8796 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8797 && operand_equal_p (TREE_OPERAND (arg0, 0),
8798 TREE_OPERAND (arg1, 0), 0)
8799 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8801 tree tree01, tree11;
8802 enum tree_code code01, code11;
8804 tree01 = TREE_OPERAND (arg0, 1);
8805 tree11 = TREE_OPERAND (arg1, 1);
8806 STRIP_NOPS (tree01);
8807 STRIP_NOPS (tree11);
8808 code01 = TREE_CODE (tree01);
8809 code11 = TREE_CODE (tree11);
8810 if (code01 == INTEGER_CST
8811 && code11 == INTEGER_CST
8812 && TREE_INT_CST_HIGH (tree01) == 0
8813 && TREE_INT_CST_HIGH (tree11) == 0
8814 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8815 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8816 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8817 code0 == LSHIFT_EXPR ? tree01 : tree11);
8818 else if (code11 == MINUS_EXPR)
8820 tree tree110, tree111;
8821 tree110 = TREE_OPERAND (tree11, 0);
8822 tree111 = TREE_OPERAND (tree11, 1);
8823 STRIP_NOPS (tree110);
8824 STRIP_NOPS (tree111);
8825 if (TREE_CODE (tree110) == INTEGER_CST
8826 && 0 == compare_tree_int (tree110,
8827 TYPE_PRECISION
8828 (TREE_TYPE (TREE_OPERAND
8829 (arg0, 0))))
8830 && operand_equal_p (tree01, tree111, 0))
8831 return build2 ((code0 == LSHIFT_EXPR
8832 ? LROTATE_EXPR
8833 : RROTATE_EXPR),
8834 type, TREE_OPERAND (arg0, 0), tree01);
8836 else if (code01 == MINUS_EXPR)
8838 tree tree010, tree011;
8839 tree010 = TREE_OPERAND (tree01, 0);
8840 tree011 = TREE_OPERAND (tree01, 1);
8841 STRIP_NOPS (tree010);
8842 STRIP_NOPS (tree011);
8843 if (TREE_CODE (tree010) == INTEGER_CST
8844 && 0 == compare_tree_int (tree010,
8845 TYPE_PRECISION
8846 (TREE_TYPE (TREE_OPERAND
8847 (arg0, 0))))
8848 && operand_equal_p (tree11, tree011, 0))
8849 return build2 ((code0 != LSHIFT_EXPR
8850 ? LROTATE_EXPR
8851 : RROTATE_EXPR),
8852 type, TREE_OPERAND (arg0, 0), tree11);
8853 }
8854 }
8856 associate:
8858 /* In most languages, we can't associate operations on floats through
8859 parentheses. Rather than remember where the parentheses were, we
8860 don't associate floats at all, unless the user has specified
8861 -funsafe-math-optimizations. */
8863 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8865 tree var0, con0, lit0, minus_lit0;
8866 tree var1, con1, lit1, minus_lit1;
8867 bool ok = true;
8869 /* Split both trees into variables, constants, and literals. Then
8870 associate each group together, the constants with literals,
8871 then the result with variables. This increases the chances of
8872 literals being recombined later and of generating relocatable
8873 expressions for the sum of a constant and literal. */
8874 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8875 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8876 code == MINUS_EXPR);
8878 /* With undefined overflow we can only associate constants
8879 with one variable. */
8880 if ((POINTER_TYPE_P (type)
8881 || (INTEGRAL_TYPE_P (type)
8882 && !(TYPE_UNSIGNED (type) || flag_wrapv)))
8883 && var0 && var1)
8884 {
8885 tree tmp0 = var0;
8886 tree tmp1 = var1;
8888 if (TREE_CODE (tmp0) == NEGATE_EXPR)
8889 tmp0 = TREE_OPERAND (tmp0, 0);
8890 if (TREE_CODE (tmp1) == NEGATE_EXPR)
8891 tmp1 = TREE_OPERAND (tmp1, 0);
8892 /* The only case we can still associate with two variables
8893 is if they are the same, modulo negation. */
8894 if (!operand_equal_p (tmp0, tmp1, 0))
8895 ok = false;
8896 }
8898 /* Only do something if we found more than two objects. Otherwise,
8899 nothing has changed and we risk infinite recursion. */
8900 if (ok
8901 && (2 < ((var0 != 0) + (var1 != 0)
8902 + (con0 != 0) + (con1 != 0)
8903 + (lit0 != 0) + (lit1 != 0)
8904 + (minus_lit0 != 0) + (minus_lit1 != 0))))
8906 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8907 if (code == MINUS_EXPR)
8908 code = PLUS_EXPR;
8910 var0 = associate_trees (var0, var1, code, type);
8911 con0 = associate_trees (con0, con1, code, type);
8912 lit0 = associate_trees (lit0, lit1, code, type);
8913 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8915 /* Preserve the MINUS_EXPR if the negative part of the literal is
8916 greater than the positive part. Otherwise, the multiplicative
8917 folding code (i.e. extract_muldiv) may be fooled when
8918 unsigned constants are subtracted, like in the following
8919 example: ((X*2 + 4) - 8U)/2. */
8920 if (minus_lit0 && lit0)
8921 {
8922 if (TREE_CODE (lit0) == INTEGER_CST
8923 && TREE_CODE (minus_lit0) == INTEGER_CST
8924 && tree_int_cst_lt (lit0, minus_lit0))
8925 {
8926 minus_lit0 = associate_trees (minus_lit0, lit0,
8927 MINUS_EXPR, type);
8928 lit0 = 0;
8929 }
8930 else
8931 {
8932 lit0 = associate_trees (lit0, minus_lit0,
8933 MINUS_EXPR, type);
8934 minus_lit0 = 0;
8935 }
8936 }
8937 if (minus_lit0)
8938 {
8939 if (con0 == 0)
8940 return fold_convert (type,
8941 associate_trees (var0, minus_lit0,
8942 MINUS_EXPR, type));
8943 else
8944 {
8945 con0 = associate_trees (con0, minus_lit0,
8946 MINUS_EXPR, type);
8947 return fold_convert (type,
8948 associate_trees (var0, con0,
8949 PLUS_EXPR, type));
8950 }
8951 }
8953 con0 = associate_trees (con0, lit0, code, type);
8954 return fold_convert (type, associate_trees (var0, con0,
8955 code, type));
8956 }
8958 return NULL_TREE;
8961 case MINUS_EXPR:
8962 /* A - (-B) -> A + B */
8963 if (TREE_CODE (arg1) == NEGATE_EXPR)
8964 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8965 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8966 if (TREE_CODE (arg0) == NEGATE_EXPR
8967 && (FLOAT_TYPE_P (type)
8968 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8969 && negate_expr_p (arg1)
8970 && reorder_operands_p (arg0, arg1))
8971 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8972 TREE_OPERAND (arg0, 0));
8973 /* Convert -A - 1 to ~A. */
8974 if (INTEGRAL_TYPE_P (type)
8975 && TREE_CODE (arg0) == NEGATE_EXPR
8976 && integer_onep (arg1))
8977 return fold_build1 (BIT_NOT_EXPR, type,
8978 fold_convert (type, TREE_OPERAND (arg0, 0)));
8980 /* Convert -1 - A to ~A. */
8981 if (INTEGRAL_TYPE_P (type)
8982 && integer_all_onesp (arg0))
8983 return fold_build1 (BIT_NOT_EXPR, type, arg1);
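/* Illustrative sketch (not from the original sources): for int a,
   -1 - a  folds to  ~a , again by the two's-complement identity
   ~a == -a - 1.  */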
8985 if (! FLOAT_TYPE_P (type))
8987 if (integer_zerop (arg0))
8988 return negate_expr (fold_convert (type, arg1));
8989 if (integer_zerop (arg1))
8990 return non_lvalue (fold_convert (type, arg0));
8992 /* Fold A - (A & B) into ~B & A. */
8993 if (!TREE_SIDE_EFFECTS (arg0)
8994 && TREE_CODE (arg1) == BIT_AND_EXPR)
8996 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8997 return fold_build2 (BIT_AND_EXPR, type,
8998 fold_build1 (BIT_NOT_EXPR, type,
8999 TREE_OPERAND (arg1, 0)),
9000 fold_convert (type, arg0));
9001 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9002 return fold_build2 (BIT_AND_EXPR, type,
9003 fold_build1 (BIT_NOT_EXPR, type,
9004 TREE_OPERAND (arg1, 1)),
9005 fold_convert (type, arg0));
9006 }
9008 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9009 any power of 2 minus 1. */
9010 if (TREE_CODE (arg0) == BIT_AND_EXPR
9011 && TREE_CODE (arg1) == BIT_AND_EXPR
9012 && operand_equal_p (TREE_OPERAND (arg0, 0),
9013 TREE_OPERAND (arg1, 0), 0))
9015 tree mask0 = TREE_OPERAND (arg0, 1);
9016 tree mask1 = TREE_OPERAND (arg1, 1);
9017 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9019 if (operand_equal_p (tem, mask1, 0))
9021 tem = fold_build2 (BIT_XOR_EXPR, type,
9022 TREE_OPERAND (arg0, 0), mask1);
9023 return fold_build2 (MINUS_EXPR, type, tem, mask1);
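/* Illustrative sketch (not from the original sources): with
   B == 0x0f (a power of 2 minus 1),

     (a & ~0x0f) - (a & 0x0f)

   is rewritten as  (a ^ 0x0f) - 0x0f , saving one AND.  */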
9028 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9029 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9030 return non_lvalue (fold_convert (type, arg0));
9032 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9033 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9034 (-ARG1 + ARG0) reduces to -ARG1. */
9035 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9036 return negate_expr (fold_convert (type, arg1));
9038 /* Fold &x - &x. This can happen from &x.foo - &x.
9039 This is unsafe for certain floats even in non-IEEE formats.
9040 In IEEE, it is unsafe because it does wrong for NaNs.
9041 Also note that operand_equal_p is always false if an operand
9042 is volatile. */
9044 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9045 && operand_equal_p (arg0, arg1, 0))
9046 return fold_convert (type, integer_zero_node);
9048 /* A - B -> A + (-B) if B is easily negatable. */
9049 if (negate_expr_p (arg1)
9050 && ((FLOAT_TYPE_P (type)
9051 /* Avoid this transformation if B is a positive REAL_CST. */
9052 && (TREE_CODE (arg1) != REAL_CST
9053 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9054 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
9055 return fold_build2 (PLUS_EXPR, type,
9056 fold_convert (type, arg0),
9057 fold_convert (type, negate_expr (arg1)));
9059 /* Try folding difference of addresses. */
9060 {
9061 HOST_WIDE_INT diff;
9063 if ((TREE_CODE (arg0) == ADDR_EXPR
9064 || TREE_CODE (arg1) == ADDR_EXPR)
9065 && ptr_difference_const (arg0, arg1, &diff))
9066 return build_int_cst_type (type, diff);
9067 }
9069 /* Fold &a[i] - &a[j] to i-j. */
9070 if (TREE_CODE (arg0) == ADDR_EXPR
9071 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9072 && TREE_CODE (arg1) == ADDR_EXPR
9073 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9075 tree aref0 = TREE_OPERAND (arg0, 0);
9076 tree aref1 = TREE_OPERAND (arg1, 0);
9077 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9078 TREE_OPERAND (aref1, 0), 0))
9080 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9081 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9082 tree esz = array_ref_element_size (aref0);
9083 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9084 return fold_build2 (MULT_EXPR, type, diff,
9085 fold_convert (type, esz));
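/* Illustrative sketch (not from the original sources): for
   int a[10], the byte difference between the addresses &a[i] and
   &a[j] folds here to  (i - j) * sizeof (int).  */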
9090 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9091 of the array. The loop optimizer sometimes produces this type of
9092 expression. */
9093 if (TREE_CODE (arg0) == ADDR_EXPR)
9094 {
9095 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9096 if (tem)
9097 return fold_convert (type, tem);
9098 }
9100 if (flag_unsafe_math_optimizations
9101 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9102 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9103 && (tem = distribute_real_division (code, type, arg0, arg1)))
9104 return tem;
9106 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9107 same or one. */
9108 if ((TREE_CODE (arg0) == MULT_EXPR
9109 || TREE_CODE (arg1) == MULT_EXPR)
9110 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9111 {
9112 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9113 if (tem)
9114 return tem;
9115 }
9117 goto associate;
9119 case MULT_EXPR:
9120 /* (-A) * (-B) -> A * B */
9121 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9122 return fold_build2 (MULT_EXPR, type,
9123 fold_convert (type, TREE_OPERAND (arg0, 0)),
9124 fold_convert (type, negate_expr (arg1)));
9125 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9126 return fold_build2 (MULT_EXPR, type,
9127 fold_convert (type, negate_expr (arg0)),
9128 fold_convert (type, TREE_OPERAND (arg1, 0)));
9130 if (! FLOAT_TYPE_P (type))
9132 if (integer_zerop (arg1))
9133 return omit_one_operand (type, arg1, arg0);
9134 if (integer_onep (arg1))
9135 return non_lvalue (fold_convert (type, arg0));
9136 /* Transform x * -1 into -x. */
9137 if (integer_all_onesp (arg1))
9138 return fold_convert (type, negate_expr (arg0));
9140 /* (a * (1 << b)) is (a << b) */
9141 if (TREE_CODE (arg1) == LSHIFT_EXPR
9142 && integer_onep (TREE_OPERAND (arg1, 0)))
9143 return fold_build2 (LSHIFT_EXPR, type, arg0,
9144 TREE_OPERAND (arg1, 1));
9145 if (TREE_CODE (arg0) == LSHIFT_EXPR
9146 && integer_onep (TREE_OPERAND (arg0, 0)))
9147 return fold_build2 (LSHIFT_EXPR, type, arg1,
9148 TREE_OPERAND (arg0, 1));
9150 strict_overflow_p = false;
9151 if (TREE_CODE (arg1) == INTEGER_CST
9152 && 0 != (tem = extract_muldiv (op0,
9153 fold_convert (type, arg1),
9154 code, NULL_TREE,
9155 &strict_overflow_p)))
9157 if (strict_overflow_p)
9158 fold_overflow_warning (("assuming signed overflow does not "
9159 "occur when simplifying "
9161 WARN_STRICT_OVERFLOW_MISC);
9162 return fold_convert (type, tem);
9165 /* Optimize z * conj(z) for integer complex numbers. */
9166 if (TREE_CODE (arg0) == CONJ_EXPR
9167 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9168 return fold_mult_zconjz (type, arg1);
9169 if (TREE_CODE (arg1) == CONJ_EXPR
9170 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9171 return fold_mult_zconjz (type, arg0);
9175 /* Maybe fold x * 0 to 0. The expressions aren't the same
9176 when x is NaN, since x * 0 is also NaN. Nor are they the
9177 same in modes with signed zeros, since multiplying a
9178 negative value by 0 gives -0, not +0. */
9179 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9180 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9181 && real_zerop (arg1))
9182 return omit_one_operand (type, arg1, arg0);
9183 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9184 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9185 && real_onep (arg1))
9186 return non_lvalue (fold_convert (type, arg0));
9188 /* Transform x * -1.0 into -x. */
9189 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9190 && real_minus_onep (arg1))
9191 return fold_convert (type, negate_expr (arg0));
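/* Illustrative sketch (not from the original sources): for double x,
   x * -1.0  becomes  -x ; this is safe except for signaling NaNs,
   hence the HONOR_SNANS guard above.  */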
9193 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9194 if (flag_unsafe_math_optimizations
9195 && TREE_CODE (arg0) == RDIV_EXPR
9196 && TREE_CODE (arg1) == REAL_CST
9197 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9199 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9200 arg1, 0);
9201 if (tem)
9202 return fold_build2 (RDIV_EXPR, type, tem,
9203 TREE_OPERAND (arg0, 1));
9204 }
9206 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9207 if (operand_equal_p (arg0, arg1, 0))
9209 tree tem = fold_strip_sign_ops (arg0);
9210 if (tem != NULL_TREE)
9212 tem = fold_convert (type, tem);
9213 return fold_build2 (MULT_EXPR, type, tem, tem);
9217 /* Optimize z * conj(z) for floating point complex numbers.
9218 Guarded by flag_unsafe_math_optimizations as non-finite
9219 imaginary components don't produce scalar results. */
9220 if (flag_unsafe_math_optimizations
9221 && TREE_CODE (arg0) == CONJ_EXPR
9222 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9223 return fold_mult_zconjz (type, arg1);
9224 if (flag_unsafe_math_optimizations
9225 && TREE_CODE (arg1) == CONJ_EXPR
9226 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9227 return fold_mult_zconjz (type, arg0);
9229 if (flag_unsafe_math_optimizations)
9231 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9232 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9234 /* Optimizations of root(...)*root(...). */
9235 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9237 tree rootfn, arg, arglist;
9238 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9239 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9241 /* Optimize sqrt(x)*sqrt(x) as x. */
9242 if (BUILTIN_SQRT_P (fcode0)
9243 && operand_equal_p (arg00, arg10, 0)
9244 && ! HONOR_SNANS (TYPE_MODE (type)))
9245 return arg00;
9247 /* Optimize root(x)*root(y) as root(x*y). */
9248 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9249 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9250 arglist = build_tree_list (NULL_TREE, arg);
9251 return build_function_call_expr (rootfn, arglist);
9254 /* Optimize expN(x)*expN(y) as expN(x+y). */
9255 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9257 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9258 tree arg = fold_build2 (PLUS_EXPR, type,
9259 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9260 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9261 tree arglist = build_tree_list (NULL_TREE, arg);
9262 return build_function_call_expr (expfn, arglist);
9265 /* Optimizations of pow(...)*pow(...). */
9266 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9267 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9268 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9270 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9271 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9272 1)));
9273 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9274 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9275 1)));
9277 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9278 if (operand_equal_p (arg01, arg11, 0))
9280 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9281 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9282 tree arglist = tree_cons (NULL_TREE, arg,
9283 build_tree_list (NULL_TREE,
9284 arg01));
9285 return build_function_call_expr (powfn, arglist);
9288 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9289 if (operand_equal_p (arg00, arg10, 0))
9291 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9292 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9293 tree arglist = tree_cons (NULL_TREE, arg00,
9294 build_tree_list (NULL_TREE,
9295 arg));
9296 return build_function_call_expr (powfn, arglist);
9300 /* Optimize tan(x)*cos(x) as sin(x). */
9301 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9302 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9303 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9304 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9305 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9306 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9307 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9308 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9310 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9312 if (sinfn != NULL_TREE)
9313 return build_function_call_expr (sinfn,
9314 TREE_OPERAND (arg0, 1));
9317 /* Optimize x*pow(x,c) as pow(x,c+1). */
9318 if (fcode1 == BUILT_IN_POW
9319 || fcode1 == BUILT_IN_POWF
9320 || fcode1 == BUILT_IN_POWL)
9322 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9323 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9324 1)));
9325 if (TREE_CODE (arg11) == REAL_CST
9326 && ! TREE_CONSTANT_OVERFLOW (arg11)
9327 && operand_equal_p (arg0, arg10, 0))
9329 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9330 tree arg, arglist;
9331 REAL_VALUE_TYPE c;
9333 c = TREE_REAL_CST (arg11);
9334 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9335 arg = build_real (type, c);
9336 arglist = build_tree_list (NULL_TREE, arg);
9337 arglist = tree_cons (NULL_TREE, arg0, arglist);
9338 return build_function_call_expr (powfn, arglist);
9342 /* Optimize pow(x,c)*x as pow(x,c+1). */
9343 if (fcode0 == BUILT_IN_POW
9344 || fcode0 == BUILT_IN_POWF
9345 || fcode0 == BUILT_IN_POWL)
9347 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9348 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9349 1)));
9350 if (TREE_CODE (arg01) == REAL_CST
9351 && ! TREE_CONSTANT_OVERFLOW (arg01)
9352 && operand_equal_p (arg1, arg00, 0))
9354 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9355 tree arg, arglist;
9356 REAL_VALUE_TYPE c;
9358 c = TREE_REAL_CST (arg01);
9359 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9360 arg = build_real (type, c);
9361 arglist = build_tree_list (NULL_TREE, arg);
9362 arglist = tree_cons (NULL_TREE, arg1, arglist);
9363 return build_function_call_expr (powfn, arglist);
9367 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9368 if (optimize
9369 && operand_equal_p (arg0, arg1, 0))
9370 {
9371 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9373 if (powfn)
9374 {
9375 tree arg = build_real (type, dconst2);
9376 tree arglist = build_tree_list (NULL_TREE, arg);
9377 arglist = tree_cons (NULL_TREE, arg0, arglist);
9378 return build_function_call_expr (powfn, arglist);
9379 }
9380 }
9381 }
9383 goto associate;
9385 case BIT_IOR_EXPR:
9386 bit_ior:
9387 if (integer_all_onesp (arg1))
9388 return omit_one_operand (type, arg1, arg0);
9389 if (integer_zerop (arg1))
9390 return non_lvalue (fold_convert (type, arg0));
9391 if (operand_equal_p (arg0, arg1, 0))
9392 return non_lvalue (fold_convert (type, arg0));
9394 /* ~X | X is -1. */
9395 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9396 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9397 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9399 t1 = build_int_cst (type, -1);
9400 t1 = force_fit_type (t1, 0, false, false);
9401 return omit_one_operand (type, t1, arg1);
9404 /* X | ~X is -1. */
9405 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9406 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9407 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9409 t1 = build_int_cst (type, -1);
9410 t1 = force_fit_type (t1, 0, false, false);
9411 return omit_one_operand (type, t1, arg0);
9414 /* Canonicalize (X & C1) | C2. */
9415 if (TREE_CODE (arg0) == BIT_AND_EXPR
9416 && TREE_CODE (arg1) == INTEGER_CST
9417 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9419 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9420 int width = TYPE_PRECISION (type);
9421 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9422 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9423 hi2 = TREE_INT_CST_HIGH (arg1);
9424 lo2 = TREE_INT_CST_LOW (arg1);
9426 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9427 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9428 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9430 if (width > HOST_BITS_PER_WIDE_INT)
9431 {
9432 mhi = (unsigned HOST_WIDE_INT) -1
9433 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9434 mlo = -1;
9435 }
9436 else
9437 {
9438 mhi = 0;
9439 mlo = (unsigned HOST_WIDE_INT) -1
9440 >> (HOST_BITS_PER_WIDE_INT - width);
9441 }
9443 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9444 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9445 return fold_build2 (BIT_IOR_EXPR, type,
9446 TREE_OPERAND (arg0, 0), arg1);
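/* Illustrative sketch (not from the original sources): with
   C1 == 0x0f and C2 == 0x03, the next step shrinks C1 so that
   (x & 0x0f) | 0x03  becomes  (x & 0x0c) | 0x03 ; and when C1 | C2
   is all ones, e.g. (x & 0xf0) | 0x0f on an 8-bit type, the AND
   just disappeared above, giving  x | 0x0f.  */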
9448 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9451 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9452 return fold_build2 (BIT_IOR_EXPR, type,
9453 fold_build2 (BIT_AND_EXPR, type,
9454 TREE_OPERAND (arg0, 0),
9455 build_int_cst_wide (type,
9456 lo1 & ~lo2,
9457 hi1 & ~hi2)),
9458 arg1);
9459 }
9461 /* (X & Y) | Y is (X, Y). */
9462 if (TREE_CODE (arg0) == BIT_AND_EXPR
9463 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9464 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9465 /* (X & Y) | X is (Y, X). */
9466 if (TREE_CODE (arg0) == BIT_AND_EXPR
9467 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9468 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9469 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9470 /* X | (X & Y) is (Y, X). */
9471 if (TREE_CODE (arg1) == BIT_AND_EXPR
9472 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9473 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9474 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9475 /* X | (Y & X) is (Y, X). */
9476 if (TREE_CODE (arg1) == BIT_AND_EXPR
9477 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9478 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9479 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9481 t1 = distribute_bit_expr (code, type, arg0, arg1);
9482 if (t1 != NULL_TREE)
9483 return t1;
9485 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9487 This results in more efficient code for machines without a NAND
9488 instruction. Combine will canonicalize to the first form
9489 which will allow use of NAND instructions provided by the
9490 backend if they exist. */
9491 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9492 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9494 return fold_build1 (BIT_NOT_EXPR, type,
9495 build2 (BIT_AND_EXPR, type,
9496 TREE_OPERAND (arg0, 0),
9497 TREE_OPERAND (arg1, 0)));
9500 /* See if this can be simplified into a rotate first. If that
9501 is unsuccessful continue in the association code. */
9502 goto bit_rotate;
9504 case BIT_XOR_EXPR:
9505 if (integer_zerop (arg1))
9506 return non_lvalue (fold_convert (type, arg0));
9507 if (integer_all_onesp (arg1))
9508 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9509 if (operand_equal_p (arg0, arg1, 0))
9510 return omit_one_operand (type, integer_zero_node, arg0);
9512 /* ~X ^ X is -1. */
9513 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9514 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9515 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9517 t1 = build_int_cst (type, -1);
9518 t1 = force_fit_type (t1, 0, false, false);
9519 return omit_one_operand (type, t1, arg1);
9522 /* X ^ ~X is -1. */
9523 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9524 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9525 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9527 t1 = build_int_cst (type, -1);
9528 t1 = force_fit_type (t1, 0, false, false);
9529 return omit_one_operand (type, t1, arg0);
9532 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9533 with a constant, and the two constants have no bits in common,
9534 we should treat this as a BIT_IOR_EXPR since this may produce more
9535 simplifications. */
9536 if (TREE_CODE (arg0) == BIT_AND_EXPR
9537 && TREE_CODE (arg1) == BIT_AND_EXPR
9538 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9539 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9540 && integer_zerop (const_binop (BIT_AND_EXPR,
9541 TREE_OPERAND (arg0, 1),
9542 TREE_OPERAND (arg1, 1), 0)))
9543 {
9544 code = BIT_IOR_EXPR;
9545 goto bit_ior;
9546 }
9548 /* (X | Y) ^ X -> Y & ~X. */
9549 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9550 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9551 {
9552 tree t2 = TREE_OPERAND (arg0, 1);
9553 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9554 arg1);
9555 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9556 fold_convert (type, t1));
9557 return t1;
9558 }
9560 /* (Y | X) ^ X -> Y & ~X. */
9561 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9562 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9563 {
9564 tree t2 = TREE_OPERAND (arg0, 0);
9565 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9566 arg1);
9567 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9568 fold_convert (type, t1));
9569 return t1;
9570 }
9572 /* X ^ (X | Y) -> Y & ~X. */
9573 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9574 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9575 {
9576 tree t2 = TREE_OPERAND (arg1, 1);
9577 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9578 arg0);
9579 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9580 fold_convert (type, t1));
9581 return t1;
9582 }
9584 /* X ^ (Y | X) -> Y & ~X. */
9585 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9586 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9587 {
9588 tree t2 = TREE_OPERAND (arg1, 0);
9589 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9590 arg0);
9591 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9592 fold_convert (type, t1));
9593 return t1;
9594 }
9596 /* Convert ~X ^ ~Y to X ^ Y. */
9597 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9598 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9599 return fold_build2 (code, type,
9600 fold_convert (type, TREE_OPERAND (arg0, 0)),
9601 fold_convert (type, TREE_OPERAND (arg1, 0)));
9603 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9604 if (TREE_CODE (arg0) == BIT_AND_EXPR
9605 && integer_onep (TREE_OPERAND (arg0, 1))
9606 && integer_onep (arg1))
9607 return fold_build2 (EQ_EXPR, type, arg0,
9608 build_int_cst (TREE_TYPE (arg0), 0));
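/* Illustrative sketch (not from the original sources): testing the
   low bit with  (x & 1) ^ 1  is canonicalized to  (x & 1) == 0 ,
   which later passes recognize more readily.  */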
9610 /* Fold (X & Y) ^ Y as ~X & Y. */
9611 if (TREE_CODE (arg0) == BIT_AND_EXPR
9612 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9614 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9615 return fold_build2 (BIT_AND_EXPR, type,
9616 fold_build1 (BIT_NOT_EXPR, type, tem),
9617 fold_convert (type, arg1));
9619 /* Fold (X & Y) ^ X as ~Y & X. */
9620 if (TREE_CODE (arg0) == BIT_AND_EXPR
9621 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9622 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9624 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9625 return fold_build2 (BIT_AND_EXPR, type,
9626 fold_build1 (BIT_NOT_EXPR, type, tem),
9627 fold_convert (type, arg1));
9629 /* Fold X ^ (X & Y) as X & ~Y. */
9630 if (TREE_CODE (arg1) == BIT_AND_EXPR
9631 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9633 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9634 return fold_build2 (BIT_AND_EXPR, type,
9635 fold_convert (type, arg0),
9636 fold_build1 (BIT_NOT_EXPR, type, tem));
9638 /* Fold X ^ (Y & X) as ~Y & X. */
9639 if (TREE_CODE (arg1) == BIT_AND_EXPR
9640 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9641 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9643 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9644 return fold_build2 (BIT_AND_EXPR, type,
9645 fold_build1 (BIT_NOT_EXPR, type, tem),
9646 fold_convert (type, arg0));
9649 /* See if this can be simplified into a rotate first. If that
9650 is unsuccessful continue in the association code. */
9651 goto bit_rotate;
9653 case BIT_AND_EXPR:
9654 if (integer_all_onesp (arg1))
9655 return non_lvalue (fold_convert (type, arg0));
9656 if (integer_zerop (arg1))
9657 return omit_one_operand (type, arg1, arg0);
9658 if (operand_equal_p (arg0, arg1, 0))
9659 return non_lvalue (fold_convert (type, arg0));
9661 /* ~X & X is always zero. */
9662 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9663 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9664 return omit_one_operand (type, integer_zero_node, arg1);
9666 /* X & ~X is always zero. */
9667 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9668 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9669 return omit_one_operand (type, integer_zero_node, arg0);
9671 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9672 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9673 && TREE_CODE (arg1) == INTEGER_CST
9674 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9675 return fold_build2 (BIT_IOR_EXPR, type,
9676 fold_build2 (BIT_AND_EXPR, type,
9677 TREE_OPERAND (arg0, 0), arg1),
9678 fold_build2 (BIT_AND_EXPR, type,
9679 TREE_OPERAND (arg0, 1), arg1));
9681 /* (X | Y) & Y is (X, Y). */
9682 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9683 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9684 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9685 /* (X | Y) & X is (Y, X). */
9686 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9688 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9689 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9690 /* X & (X | Y) is (Y, X). */
9691 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9692 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9693 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9694 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9695 /* X & (Y | X) is (Y, X). */
9696 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9697 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9698 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9699 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9701 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9702 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9703 && integer_onep (TREE_OPERAND (arg0, 1))
9704 && integer_onep (arg1))
9706 tem = TREE_OPERAND (arg0, 0);
9707 return fold_build2 (EQ_EXPR, type,
9708 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9709 build_int_cst (TREE_TYPE (tem), 1)),
9710 build_int_cst (TREE_TYPE (tem), 0));
9712 /* Fold ~X & 1 as (X & 1) == 0. */
9713 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9714 && integer_onep (arg1))
9716 tem = TREE_OPERAND (arg0, 0);
9717 return fold_build2 (EQ_EXPR, type,
9718 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9719 build_int_cst (TREE_TYPE (tem), 1)),
9720 build_int_cst (TREE_TYPE (tem), 0));
9723 /* Fold (X ^ Y) & Y as ~X & Y. */
9724 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9725 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9727 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9728 return fold_build2 (BIT_AND_EXPR, type,
9729 fold_build1 (BIT_NOT_EXPR, type, tem),
9730 fold_convert (type, arg1));
9732 /* Fold (X ^ Y) & X as ~Y & X. */
9733 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9734 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9735 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9737 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9738 return fold_build2 (BIT_AND_EXPR, type,
9739 fold_build1 (BIT_NOT_EXPR, type, tem),
9740 fold_convert (type, arg1));
9742 /* Fold X & (X ^ Y) as X & ~Y. */
9743 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9744 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9746 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9747 return fold_build2 (BIT_AND_EXPR, type,
9748 fold_convert (type, arg0),
9749 fold_build1 (BIT_NOT_EXPR, type, tem));
9751 /* Fold X & (Y ^ X) as ~Y & X. */
9752 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9753 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9754 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9756 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9757 return fold_build2 (BIT_AND_EXPR, type,
9758 fold_build1 (BIT_NOT_EXPR, type, tem),
9759 fold_convert (type, arg0));
9762 t1 = distribute_bit_expr (code, type, arg0, arg1);
9763 if (t1 != NULL_TREE)
9764 return t1;
9765 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9766 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9767 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9768 {
9769 unsigned int prec
9770 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9772 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9773 && (~TREE_INT_CST_LOW (arg1)
9774 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9775 return fold_convert (type, TREE_OPERAND (arg0, 0));
9776 }
9778 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9780 This results in more efficient code for machines without a NOR
9781 instruction. Combine will canonicalize to the first form
9782 which will allow use of NOR instructions provided by the
9783 backend if they exist. */
9784 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9785 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9787 return fold_build1 (BIT_NOT_EXPR, type,
9788 build2 (BIT_IOR_EXPR, type,
9789 TREE_OPERAND (arg0, 0),
9790 TREE_OPERAND (arg1, 0)));
9791 }
9793 goto associate;
9795 case RDIV_EXPR:
9796 /* Don't touch a floating-point divide by zero unless the mode
9797 of the constant can represent infinity. */
9798 if (TREE_CODE (arg1) == REAL_CST
9799 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9800 && real_zerop (arg1))
9801 return NULL_TREE;
9803 /* Optimize A / A to 1.0 if we don't care about
9804 NaNs or Infinities. Skip the transformation
9805 for non-real operands. */
9806 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9807 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9808 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9809 && operand_equal_p (arg0, arg1, 0))
9811 tree r = build_real (TREE_TYPE (arg0), dconst1);
9813 return omit_two_operands (type, r, arg0, arg1);
9816 /* The complex version of the above A / A optimization. */
9817 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9818 && operand_equal_p (arg0, arg1, 0))
9820 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9821 if (! HONOR_NANS (TYPE_MODE (elem_type))
9822 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9824 tree r = build_real (elem_type, dconst1);
9825 /* omit_two_operands will call fold_convert for us. */
9826 return omit_two_operands (type, r, arg0, arg1);
9830 /* (-A) / (-B) -> A / B */
9831 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9832 return fold_build2 (RDIV_EXPR, type,
9833 TREE_OPERAND (arg0, 0),
9834 negate_expr (arg1));
9835 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9836 return fold_build2 (RDIV_EXPR, type,
9837 negate_expr (arg0),
9838 TREE_OPERAND (arg1, 0));
9840 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9841 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9842 && real_onep (arg1))
9843 return non_lvalue (fold_convert (type, arg0));
9845 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9846 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9847 && real_minus_onep (arg1))
9848 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9850 /* If ARG1 is a constant, we can convert this to a multiply by the
9851 reciprocal. This does not have the same rounding properties,
9852 so only do this if -funsafe-math-optimizations. We can actually
9853 always safely do it if ARG1 is a power of two, but it's hard to
9854 tell if it is or not in a portable manner. */
9855 if (TREE_CODE (arg1) == REAL_CST)
9856 {
9857 if (flag_unsafe_math_optimizations
9858 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9859 arg1, 0)))
9860 return fold_build2 (MULT_EXPR, type, arg0, tem);
9861 /* Find the reciprocal if optimizing and the result is exact. */
9862 if (optimize)
9863 {
9864 REAL_VALUE_TYPE r;
9865 r = TREE_REAL_CST (arg1);
9866 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9867 {
9868 tem = build_real (type, r);
9869 return fold_build2 (MULT_EXPR, type,
9870 fold_convert (type, arg0), tem);
9871 }
9872 }
9873 }
9874 /* Convert A/B/C to A/(B*C). */
9875 if (flag_unsafe_math_optimizations
9876 && TREE_CODE (arg0) == RDIV_EXPR)
9877 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9878 fold_build2 (MULT_EXPR, type,
9879 TREE_OPERAND (arg0, 1), arg1));
9881 /* Convert A/(B/C) to (A/B)*C. */
9882 if (flag_unsafe_math_optimizations
9883 && TREE_CODE (arg1) == RDIV_EXPR)
9884 return fold_build2 (MULT_EXPR, type,
9885 fold_build2 (RDIV_EXPR, type, arg0,
9886 TREE_OPERAND (arg1, 0)),
9887 TREE_OPERAND (arg1, 1));
9889 /* Convert C1/(X*C2) into (C1/C2)/X. */
9890 if (flag_unsafe_math_optimizations
9891 && TREE_CODE (arg1) == MULT_EXPR
9892 && TREE_CODE (arg0) == REAL_CST
9893 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9895 tree tem = const_binop (RDIV_EXPR, arg0,
9896 TREE_OPERAND (arg1, 1), 0);
9897 if (tem)
9898 return fold_build2 (RDIV_EXPR, type, tem,
9899 TREE_OPERAND (arg1, 0));
9900 }
9902 if (flag_unsafe_math_optimizations)
9904 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9905 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9907 /* Optimize sin(x)/cos(x) as tan(x). */
9908 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9909 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9910 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9911 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9912 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9914 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9916 if (tanfn != NULL_TREE)
9917 return build_function_call_expr (tanfn,
9918 TREE_OPERAND (arg0, 1));
9921 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9922 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9923 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9924 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9925 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9926 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9928 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9930 if (tanfn != NULL_TREE)
9932 tree tmp = TREE_OPERAND (arg0, 1);
9933 tmp = build_function_call_expr (tanfn, tmp);
9934 return fold_build2 (RDIV_EXPR, type,
9935 build_real (type, dconst1), tmp);
9939 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9940 NaNs or Infinities. */
9941 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9942 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9943 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9945 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9946 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9948 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9949 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9950 && operand_equal_p (arg00, arg01, 0))
9952 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9954 if (cosfn != NULL_TREE)
9955 return build_function_call_expr (cosfn,
9956 TREE_OPERAND (arg0, 1));
9960 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9961 NaNs or Infinities. */
9962 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9963 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9964 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9966 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9967 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9969 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9970 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9971 && operand_equal_p (arg00, arg01, 0))
9973 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9975 if (cosfn != NULL_TREE)
9977 tree tmp = TREE_OPERAND (arg0, 1);
9978 tmp = build_function_call_expr (cosfn, tmp);
9979 return fold_build2 (RDIV_EXPR, type,
9980 build_real (type, dconst1),
9981 tmp);
9986 /* Optimize pow(x,c)/x as pow(x,c-1). */
9987 if (fcode0 == BUILT_IN_POW
9988 || fcode0 == BUILT_IN_POWF
9989 || fcode0 == BUILT_IN_POWL)
9991 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9992 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9993 if (TREE_CODE (arg01) == REAL_CST
9994 && ! TREE_CONSTANT_OVERFLOW (arg01)
9995 && operand_equal_p (arg1, arg00, 0))
9997 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9998 tree arg, arglist;
9999 REAL_VALUE_TYPE c;
10001 c = TREE_REAL_CST (arg01);
10002 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10003 arg = build_real (type, c);
10004 arglist = build_tree_list (NULL_TREE, arg);
10005 arglist = tree_cons (NULL_TREE, arg1, arglist);
10006 return build_function_call_expr (powfn, arglist);
10010 /* Optimize x/expN(y) into x*expN(-y). */
10011 if (BUILTIN_EXPONENT_P (fcode1))
10013 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10014 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10015 tree arglist = build_tree_list (NULL_TREE,
10016 fold_convert (type, arg));
10017 arg1 = build_function_call_expr (expfn, arglist);
10018 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10021 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10022 if (fcode1 == BUILT_IN_POW
10023 || fcode1 == BUILT_IN_POWF
10024 || fcode1 == BUILT_IN_POWL)
10026 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10027 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10028 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10029 tree neg11 = fold_convert (type, negate_expr (arg11));
10030 tree arglist = tree_cons (NULL_TREE, arg10,
10031 build_tree_list (NULL_TREE, neg11));
10032 arg1 = build_function_call_expr (powfn, arglist);
10033 return fold_build2 (MULT_EXPR, type, arg0, arg1);
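/* For example, "X / pow (Y, Z)" folds to "X * pow (Y, -Z)".  */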
10038 case TRUNC_DIV_EXPR:
10039 case FLOOR_DIV_EXPR:
10040 /* Simplify A / (B << N) where A and B are positive and B is
10041 a power of 2, to A >> (N + log2(B)). */
10042 strict_overflow_p = false;
10043 if (TREE_CODE (arg1) == LSHIFT_EXPR
10044 && (TYPE_UNSIGNED (type)
10045 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10047 tree sval = TREE_OPERAND (arg1, 0);
10048 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10050 tree sh_cnt = TREE_OPERAND (arg1, 1);
10051 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10053 if (strict_overflow_p)
10054 fold_overflow_warning (("assuming signed overflow does not "
10055 "occur when simplifying A / (B << N)"),
10056 WARN_STRICT_OVERFLOW_MISC);
10058 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10059 sh_cnt, build_int_cst (NULL_TREE, pow2));
10060 return fold_build2 (RSHIFT_EXPR, type,
10061 fold_convert (type, arg0), sh_cnt);
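/* For example, for unsigned A, "A / (16 << N)" folds to
   "A >> (N + 4)", since log2 (16) == 4.  */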
10066 case ROUND_DIV_EXPR:
10067 case CEIL_DIV_EXPR:
10068 case EXACT_DIV_EXPR:
10069 if (integer_onep (arg1))
10070 return non_lvalue (fold_convert (type, arg0));
10071 if (integer_zerop (arg1))
10072 return NULL_TREE;
10073 /* X / -1 is -X. */
10074 if (!TYPE_UNSIGNED (type)
10075 && TREE_CODE (arg1) == INTEGER_CST
10076 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10077 && TREE_INT_CST_HIGH (arg1) == -1)
10078 return fold_convert (type, negate_expr (arg0));
10080 /* Convert -A / -B to A / B when the type is signed and overflow is
10081 undefined. */
10082 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10083 && TREE_CODE (arg0) == NEGATE_EXPR
10084 && negate_expr_p (arg1))
10086 if (INTEGRAL_TYPE_P (type))
10087 fold_overflow_warning (("assuming signed overflow does not occur "
10088 "when distributing negation across "
10090 WARN_STRICT_OVERFLOW_MISC);
10091 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10092 negate_expr (arg1));
10094 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10095 && TREE_CODE (arg1) == NEGATE_EXPR
10096 && negate_expr_p (arg0))
10098 if (INTEGRAL_TYPE_P (type))
10099 fold_overflow_warning (("assuming signed overflow does not occur "
10100 "when distributing negation across "
10102 WARN_STRICT_OVERFLOW_MISC);
10103 return fold_build2 (code, type, negate_expr (arg0),
10104 TREE_OPERAND (arg1, 0));
10107 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10108 operation, EXACT_DIV_EXPR.
10110 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10111 At one time others generated faster code; it's not clear whether they
10112 still do after the last round of changes to the DIV code in expmed.c. */
10113 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10114 && multiple_of_p (type, arg0, arg1))
10115 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10117 strict_overflow_p = false;
10118 if (TREE_CODE (arg1) == INTEGER_CST
10119 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10120 &strict_overflow_p)))
10122 if (strict_overflow_p)
10123 fold_overflow_warning (("assuming signed overflow does not occur "
10124 "when simplifying division"),
10125 WARN_STRICT_OVERFLOW_MISC);
10126 return fold_convert (type, tem);
10131 case CEIL_MOD_EXPR:
10132 case FLOOR_MOD_EXPR:
10133 case ROUND_MOD_EXPR:
10134 case TRUNC_MOD_EXPR:
10135 /* X % 1 is always zero, but be sure to preserve any side
10136 effects in X. */
10137 if (integer_onep (arg1))
10138 return omit_one_operand (type, integer_zero_node, arg0);
10140 /* X % 0, return X % 0 unchanged so that we can get the
10141 proper warnings and errors. */
10142 if (integer_zerop (arg1))
10143 return NULL_TREE;
10145 /* 0 % X is always zero, but be sure to preserve any side
10146 effects in X. Place this after checking for X == 0. */
10147 if (integer_zerop (arg0))
10148 return omit_one_operand (type, integer_zero_node, arg1);
10150 /* X % -1 is zero. */
10151 if (!TYPE_UNSIGNED (type)
10152 && TREE_CODE (arg1) == INTEGER_CST
10153 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10154 && TREE_INT_CST_HIGH (arg1) == -1)
10155 return omit_one_operand (type, integer_zero_node, arg0);
10157 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10158 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10159 strict_overflow_p = false;
10160 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10161 && (TYPE_UNSIGNED (type)
10162 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10164 tree c = arg1;
10165 /* Also optimize A % (C << N) where C is a power of 2,
10166 to A & ((C << N) - 1). */
10167 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10168 c = TREE_OPERAND (arg1, 0);
10170 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10172 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10173 arg1, integer_one_node);
10174 if (strict_overflow_p)
10175 fold_overflow_warning (("assuming signed overflow does not "
10176 "occur when simplifying "
10177 "X % (power of two)"),
10178 WARN_STRICT_OVERFLOW_MISC);
10179 return fold_build2 (BIT_AND_EXPR, type,
10180 fold_convert (type, arg0),
10181 fold_convert (type, mask));
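/* For example, for unsigned X, "X % 8" folds to "X & 7", and
   "X % (4 << N)" folds to "X & ((4 << N) - 1)".  */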
10185 /* X % -C is the same as X % C. */
10186 if (code == TRUNC_MOD_EXPR
10187 && !TYPE_UNSIGNED (type)
10188 && TREE_CODE (arg1) == INTEGER_CST
10189 && !TREE_CONSTANT_OVERFLOW (arg1)
10190 && TREE_INT_CST_HIGH (arg1) < 0
10191 && !TYPE_OVERFLOW_TRAPS (type)
10192 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10193 && !sign_bit_p (arg1, arg1))
10194 return fold_build2 (code, type, fold_convert (type, arg0),
10195 fold_convert (type, negate_expr (arg1)));
10197 /* X % -Y is the same as X % Y. */
10198 if (code == TRUNC_MOD_EXPR
10199 && !TYPE_UNSIGNED (type)
10200 && TREE_CODE (arg1) == NEGATE_EXPR
10201 && !TYPE_OVERFLOW_TRAPS (type))
10202 return fold_build2 (code, type, fold_convert (type, arg0),
10203 fold_convert (type, TREE_OPERAND (arg1, 0)));
10205 if (TREE_CODE (arg1) == INTEGER_CST
10206 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10207 &strict_overflow_p)))
10209 if (strict_overflow_p)
10210 fold_overflow_warning (("assuming signed overflow does not occur "
10211 "when simplifying modulos"),
10212 WARN_STRICT_OVERFLOW_MISC);
10213 return fold_convert (type, tem);
10216 return NULL_TREE;
10218 case LROTATE_EXPR:
10219 case RROTATE_EXPR:
10220 if (integer_all_onesp (arg0))
10221 return omit_one_operand (type, arg0, arg1);
10222 goto shift;
10224 case RSHIFT_EXPR:
10225 /* Optimize -1 >> x for arithmetic right shifts. */
10226 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10227 return omit_one_operand (type, arg0, arg1);
10228 /* ... fall through ... */
10230 case LSHIFT_EXPR:
10231 shift:
10232 if (integer_zerop (arg1))
10233 return non_lvalue (fold_convert (type, arg0));
10234 if (integer_zerop (arg0))
10235 return omit_one_operand (type, arg0, arg1);
10237 /* Since negative shift count is not well-defined,
10238 don't try to compute it in the compiler. */
10239 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10240 return NULL_TREE;
10242 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10243 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10244 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10245 && host_integerp (TREE_OPERAND (arg0, 1), false)
10246 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10248 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10249 + TREE_INT_CST_LOW (arg1));
10251 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10252 being well defined. */
10253 if (low >= TYPE_PRECISION (type))
10255 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10256 low = low % TYPE_PRECISION (type);
10257 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10258 return build_int_cst (type, 0);
10259 else
10260 low = TYPE_PRECISION (type) - 1;
10263 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10264 build_int_cst (type, low));
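/* For example, "(X << 3) << 5" folds to "X << 8".  If the combined
   count reaches the precision of a 32-bit unsigned type, the whole
   left shift folds to 0, while rotate counts reduce modulo the
   precision.  */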
10267 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10268 into x & ((unsigned)-1 >> c) for unsigned types. */
10269 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10270 || (TYPE_UNSIGNED (type)
10271 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10272 && host_integerp (arg1, false)
10273 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10274 && host_integerp (TREE_OPERAND (arg0, 1), false)
10275 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10277 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10278 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10279 tree lshift;
10280 tree arg00;
10282 if (low0 == low1)
10284 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10286 lshift = build_int_cst (type, -1);
10287 lshift = int_const_binop (code, lshift, arg1, 0);
10289 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10293 /* Rewrite an LROTATE_EXPR by a constant into an
10294 RROTATE_EXPR by a new constant. */
10295 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10297 tree tem = build_int_cst (NULL_TREE,
10298 GET_MODE_BITSIZE (TYPE_MODE (type)));
10299 tem = fold_convert (TREE_TYPE (arg1), tem);
10300 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10301 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
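/* For example, a left rotate of a 32-bit value by 8 becomes a
   right rotate by 24, so later code only needs to handle one
   rotate direction.  */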
10304 /* If we have a rotate of a bit operation with the rotate count and
10305 the second operand of the bit operation both constant,
10306 permute the two operations. */
10307 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10308 && (TREE_CODE (arg0) == BIT_AND_EXPR
10309 || TREE_CODE (arg0) == BIT_IOR_EXPR
10310 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10312 return fold_build2 (TREE_CODE (arg0), type,
10313 fold_build2 (code, type,
10314 TREE_OPERAND (arg0, 0), arg1),
10315 fold_build2 (code, type,
10316 TREE_OPERAND (arg0, 1), arg1));
10318 /* Two consecutive rotates adding up to the width of the mode can
10319 be ignored. */
10320 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10321 && TREE_CODE (arg0) == RROTATE_EXPR
10322 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10323 && TREE_INT_CST_HIGH (arg1) == 0
10324 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10325 && ((TREE_INT_CST_LOW (arg1)
10326 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10327 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10328 return TREE_OPERAND (arg0, 0);
10330 return NULL_TREE;
10332 case MIN_EXPR:
10333 if (operand_equal_p (arg0, arg1, 0))
10334 return omit_one_operand (type, arg0, arg1);
10335 if (INTEGRAL_TYPE_P (type)
10336 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10337 return omit_one_operand (type, arg1, arg0);
10338 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10339 if (tem)
10340 return tem;
10341 goto associate;
10343 case MAX_EXPR:
10344 if (operand_equal_p (arg0, arg1, 0))
10345 return omit_one_operand (type, arg0, arg1);
10346 if (INTEGRAL_TYPE_P (type)
10347 && TYPE_MAX_VALUE (type)
10348 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10349 return omit_one_operand (type, arg1, arg0);
10350 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10351 if (tem)
10352 return tem;
10353 goto associate;
10355 case TRUTH_ANDIF_EXPR:
10356 /* Note that the operands of this must be ints
10357 and their values must be 0 or 1.
10358 ("true" is a fixed value perhaps depending on the language.) */
10359 /* If first arg is constant zero, return it. */
10360 if (integer_zerop (arg0))
10361 return fold_convert (type, arg0);
10362 case TRUTH_AND_EXPR:
10363 /* If either arg is constant true, drop it. */
10364 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10365 return non_lvalue (fold_convert (type, arg1));
10366 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10367 /* Preserve sequence points. */
10368 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10369 return non_lvalue (fold_convert (type, arg0));
10370 /* If second arg is constant zero, result is zero, but first arg
10371 must be evaluated. */
10372 if (integer_zerop (arg1))
10373 return omit_one_operand (type, arg1, arg0);
10374 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10375 case will be handled here. */
10376 if (integer_zerop (arg0))
10377 return omit_one_operand (type, arg0, arg1);
10379 /* !X && X is always false. */
10380 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10381 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10382 return omit_one_operand (type, integer_zero_node, arg1);
10383 /* X && !X is always false. */
10384 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10385 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10386 return omit_one_operand (type, integer_zero_node, arg0);
10388 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10389 means A >= Y && A != MAX, but in this case we know that
10390 A < X <= MAX. */
10392 if (!TREE_SIDE_EFFECTS (arg0)
10393 && !TREE_SIDE_EFFECTS (arg1))
10395 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10396 if (tem && !operand_equal_p (tem, arg0, 0))
10397 return fold_build2 (code, type, tem, arg1);
10399 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10400 if (tem && !operand_equal_p (tem, arg1, 0))
10401 return fold_build2 (code, type, arg0, tem);
10404 truth_andor:
10405 /* We only do these simplifications if we are optimizing. */
10406 if (!optimize)
10407 return NULL_TREE;
10409 /* Check for things like (A || B) && (A || C). We can convert this
10410 to A || (B && C). Note that either operator can be any of the four
10411 truth and/or operations and the transformation will still be
10412 valid. Also note that we only care about order for the
10413 ANDIF and ORIF operators. If B contains side effects, this
10414 might change the truth-value of A. */
10415 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10416 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10417 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10418 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10419 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10420 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10422 tree a00 = TREE_OPERAND (arg0, 0);
10423 tree a01 = TREE_OPERAND (arg0, 1);
10424 tree a10 = TREE_OPERAND (arg1, 0);
10425 tree a11 = TREE_OPERAND (arg1, 1);
10426 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10427 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10428 && (code == TRUTH_AND_EXPR
10429 || code == TRUTH_OR_EXPR));
10431 if (operand_equal_p (a00, a10, 0))
10432 return fold_build2 (TREE_CODE (arg0), type, a00,
10433 fold_build2 (code, type, a01, a11));
10434 else if (commutative && operand_equal_p (a00, a11, 0))
10435 return fold_build2 (TREE_CODE (arg0), type, a00,
10436 fold_build2 (code, type, a01, a10));
10437 else if (commutative && operand_equal_p (a01, a10, 0))
10438 return fold_build2 (TREE_CODE (arg0), type, a01,
10439 fold_build2 (code, type, a00, a11));
10441 /* This case is tricky because we must either have commutative
10442 operators or else A10 must not have side-effects. */
10444 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10445 && operand_equal_p (a01, a11, 0))
10446 return fold_build2 (TREE_CODE (arg0), type,
10447 fold_build2 (code, type, a00, a10),
10448 a01);
10451 /* See if we can build a range comparison. */
10452 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10453 return tem;
10455 /* Check for the possibility of merging component references. If our
10456 lhs is another similar operation, try to merge its rhs with our
10457 rhs. Then try to merge our lhs and rhs. */
10458 if (TREE_CODE (arg0) == code
10459 && 0 != (tem = fold_truthop (code, type,
10460 TREE_OPERAND (arg0, 1), arg1)))
10461 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10463 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10464 return tem;
10466 return NULL_TREE;
10468 case TRUTH_ORIF_EXPR:
10469 /* Note that the operands of this must be ints
10470 and their values must be 0 or true.
10471 ("true" is a fixed value perhaps depending on the language.) */
10472 /* If first arg is constant true, return it. */
10473 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10474 return fold_convert (type, arg0);
10475 case TRUTH_OR_EXPR:
10476 /* If either arg is constant zero, drop it. */
10477 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10478 return non_lvalue (fold_convert (type, arg1));
10479 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10480 /* Preserve sequence points. */
10481 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10482 return non_lvalue (fold_convert (type, arg0));
10483 /* If second arg is constant true, result is true, but we must
10484 evaluate first arg. */
10485 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10486 return omit_one_operand (type, arg1, arg0);
10487 /* Likewise for first arg, but note this only occurs here for
10488 TRUTH_OR_EXPR. */
10489 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10490 return omit_one_operand (type, arg0, arg1);
10492 /* !X || X is always true. */
10493 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10494 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10495 return omit_one_operand (type, integer_one_node, arg1);
10496 /* X || !X is always true. */
10497 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10498 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10499 return omit_one_operand (type, integer_one_node, arg0);
10501 goto truth_andor;
10503 case TRUTH_XOR_EXPR:
10504 /* If the second arg is constant zero, drop it. */
10505 if (integer_zerop (arg1))
10506 return non_lvalue (fold_convert (type, arg0));
10507 /* If the second arg is constant true, this is a logical inversion. */
10508 if (integer_onep (arg1))
10510 /* Only call invert_truthvalue if operand is a truth value. */
10511 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10512 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10513 else
10514 tem = invert_truthvalue (arg0);
10515 return non_lvalue (fold_convert (type, tem));
10517 /* Identical arguments cancel to zero. */
10518 if (operand_equal_p (arg0, arg1, 0))
10519 return omit_one_operand (type, integer_zero_node, arg0);
10521 /* !X ^ X is always true. */
10522 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10523 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10524 return omit_one_operand (type, integer_one_node, arg1);
10526 /* X ^ !X is always true. */
10527 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10528 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10529 return omit_one_operand (type, integer_one_node, arg0);
10531 return NULL_TREE;
10533 case EQ_EXPR:
10534 case NE_EXPR:
10535 tem = fold_comparison (code, type, op0, op1);
10536 if (tem != NULL_TREE)
10537 return tem;
10539 /* bool_var != 0 becomes bool_var. */
10540 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10541 && code == NE_EXPR)
10542 return non_lvalue (fold_convert (type, arg0));
10544 /* bool_var == 1 becomes bool_var. */
10545 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10546 && code == EQ_EXPR)
10547 return non_lvalue (fold_convert (type, arg0));
10549 /* bool_var != 1 becomes !bool_var. */
10550 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10551 && code == NE_EXPR)
10552 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10554 /* bool_var == 0 becomes !bool_var. */
10555 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10556 && code == EQ_EXPR)
10557 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10559 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10560 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10561 && TREE_CODE (arg1) == INTEGER_CST)
10563 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10564 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10565 fold_build1 (BIT_NOT_EXPR, cmp_type,
10566 fold_convert (cmp_type, arg1)));
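/* For example, "~X == 5" folds to "X == ~5", i.e. "X == -6" in a
   signed two's complement type, moving the BIT_NOT_EXPR onto the
   constant.  */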
10569 /* If this is an equality comparison of the address of a non-weak
10570 object against zero, then we know the result. */
10571 if (TREE_CODE (arg0) == ADDR_EXPR
10572 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10573 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10574 && integer_zerop (arg1))
10575 return constant_boolean_node (code != EQ_EXPR, type);
10577 /* If this is an equality comparison of the address of two non-weak,
10578 unaliased symbols neither of which are extern (since we do not
10579 have access to attributes for externs), then we know the result. */
10580 if (TREE_CODE (arg0) == ADDR_EXPR
10581 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10582 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10583 && ! lookup_attribute ("alias",
10584 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10585 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10586 && TREE_CODE (arg1) == ADDR_EXPR
10587 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10588 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10589 && ! lookup_attribute ("alias",
10590 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10591 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10593 /* We know that we're looking at the address of two
10594 non-weak, unaliased, static _DECL nodes.
10596 It is both wasteful and incorrect to call operand_equal_p
10597 to compare the two ADDR_EXPR nodes. It is wasteful in that
10598 all we need to do is test pointer equality for the arguments
10599 to the two ADDR_EXPR nodes. It is incorrect to use
10600 operand_equal_p as that function is NOT equivalent to a
10601 C equality test. It can in fact return false for two
10602 objects which would test as equal using the C equality
10603 operator. */
10604 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10605 return constant_boolean_node (equal
10606 ? code == EQ_EXPR : code != EQ_EXPR,
10607 type);
10610 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10611 a MINUS_EXPR of a constant, we can convert it into a comparison with
10612 a revised constant as long as no overflow occurs. */
10613 if (TREE_CODE (arg1) == INTEGER_CST
10614 && (TREE_CODE (arg0) == PLUS_EXPR
10615 || TREE_CODE (arg0) == MINUS_EXPR)
10616 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10617 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10618 ? MINUS_EXPR : PLUS_EXPR,
10619 fold_convert (TREE_TYPE (arg0), arg1),
10620 TREE_OPERAND (arg0, 1), 0))
10621 && ! TREE_CONSTANT_OVERFLOW (tem))
10622 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10624 /* Similarly for a NEGATE_EXPR. */
10625 if (TREE_CODE (arg0) == NEGATE_EXPR
10626 && TREE_CODE (arg1) == INTEGER_CST
10627 && 0 != (tem = negate_expr (arg1))
10628 && TREE_CODE (tem) == INTEGER_CST
10629 && ! TREE_CONSTANT_OVERFLOW (tem))
10630 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10632 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10633 for !=. Don't do this for ordered comparisons due to overflow. */
10634 if (TREE_CODE (arg0) == MINUS_EXPR
10635 && integer_zerop (arg1))
10636 return fold_build2 (code, type,
10637 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10639 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10640 if (TREE_CODE (arg0) == ABS_EXPR
10641 && (integer_zerop (arg1) || real_zerop (arg1)))
10642 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10644 /* If this is an EQ or NE comparison with zero and ARG0 is
10645 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10646 two operations, but the latter can be done in one less insn
10647 on machines that have only two-operand insns or on which a
10648 constant cannot be the first operand. */
10649 if (TREE_CODE (arg0) == BIT_AND_EXPR
10650 && integer_zerop (arg1))
10652 tree arg00 = TREE_OPERAND (arg0, 0);
10653 tree arg01 = TREE_OPERAND (arg0, 1);
10654 if (TREE_CODE (arg00) == LSHIFT_EXPR
10655 && integer_onep (TREE_OPERAND (arg00, 0)))
10657 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10658 arg01, TREE_OPERAND (arg00, 1));
10659 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10660 build_int_cst (TREE_TYPE (arg0), 1));
10661 return fold_build2 (code, type,
10662 fold_convert (TREE_TYPE (arg1), tem), arg1);
10664 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10665 && integer_onep (TREE_OPERAND (arg01, 0)))
10667 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10668 arg00, TREE_OPERAND (arg01, 1));
10669 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10670 build_int_cst (TREE_TYPE (arg0), 1));
10671 return fold_build2 (code, type,
10672 fold_convert (TREE_TYPE (arg1), tem), arg1);
10676 /* If this is an NE or EQ comparison of zero against the result of a
10677 signed MOD operation whose second operand is a power of 2, make
10678 the MOD operation unsigned since it is simpler and equivalent. */
10679 if (integer_zerop (arg1)
10680 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10681 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10682 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10683 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10684 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10685 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10687 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10688 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10689 fold_convert (newtype,
10690 TREE_OPERAND (arg0, 0)),
10691 fold_convert (newtype,
10692 TREE_OPERAND (arg0, 1)));
10694 return fold_build2 (code, type, newmod,
10695 fold_convert (newtype, arg1));
10698 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10699 C1 is a valid shift constant, and C2 is a power of two, i.e.
10700 a single bit. */
10701 if (TREE_CODE (arg0) == BIT_AND_EXPR
10702 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10703 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10704 == INTEGER_CST
10705 && integer_pow2p (TREE_OPERAND (arg0, 1))
10706 && integer_zerop (arg1))
10708 tree itype = TREE_TYPE (arg0);
10709 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10710 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10712 /* Check for a valid shift count. */
10713 if (TREE_INT_CST_HIGH (arg001) == 0
10714 && TREE_INT_CST_LOW (arg001) < prec)
10716 tree arg01 = TREE_OPERAND (arg0, 1);
10717 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10718 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10719 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10720 can be rewritten as (X & (C2 << C1)) != 0. */
10721 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10723 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10724 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10725 return fold_build2 (code, type, tem, arg1);
10727 /* Otherwise, for signed (arithmetic) shifts,
10728 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10729 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10730 else if (!TYPE_UNSIGNED (itype))
10731 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10732 arg000, build_int_cst (itype, 0));
10733 /* Otherwise, for unsigned (logical) shifts,
10734 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10735 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10736 else
10737 return omit_one_operand (type,
10738 code == EQ_EXPR ? integer_one_node
10739 : integer_zero_node,
10740 arg000);
10744 /* If this is an NE comparison of zero with an AND of one, remove the
10745 comparison since the AND will give the correct value. */
10746 if (code == NE_EXPR
10747 && integer_zerop (arg1)
10748 && TREE_CODE (arg0) == BIT_AND_EXPR
10749 && integer_onep (TREE_OPERAND (arg0, 1)))
10750 return fold_convert (type, arg0);
10752 /* If we have (A & C) == C where C is a power of 2, convert this into
10753 (A & C) != 0. Similarly for NE_EXPR. */
10754 if (TREE_CODE (arg0) == BIT_AND_EXPR
10755 && integer_pow2p (TREE_OPERAND (arg0, 1))
10756 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10757 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10758 arg0, fold_convert (TREE_TYPE (arg0),
10759 integer_zero_node));
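/* For example, "(X & 16) == 16" folds to "(X & 16) != 0".  */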
10761 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10762 bit, then fold the expression into A < 0 or A >= 0. */
10763 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10764 if (tem)
10765 return tem;
10767 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10768 Similarly for NE_EXPR. */
10769 if (TREE_CODE (arg0) == BIT_AND_EXPR
10770 && TREE_CODE (arg1) == INTEGER_CST
10771 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10773 tree notc = fold_build1 (BIT_NOT_EXPR,
10774 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10775 TREE_OPERAND (arg0, 1));
10776 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10777 arg1, notc);
10778 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10779 if (integer_nonzerop (dandnotc))
10780 return omit_one_operand (type, rslt, arg0);
10783 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10784 Similarly for NE_EXPR. */
10785 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10786 && TREE_CODE (arg1) == INTEGER_CST
10787 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10789 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10790 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10791 TREE_OPERAND (arg0, 1), notd);
10792 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10793 if (integer_nonzerop (candnotd))
10794 return omit_one_operand (type, rslt, arg0);
10797 /* If this is a comparison of a field, we may be able to simplify it. */
10798 if (((TREE_CODE (arg0) == COMPONENT_REF
10799 && lang_hooks.can_use_bit_fields_p ())
10800 || TREE_CODE (arg0) == BIT_FIELD_REF)
10801 /* Handle the constant case even without -O
10802 to make sure the warnings are given. */
10803 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10805 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10806 if (t1)
10807 return t1;
10810 /* Optimize comparisons of strlen vs zero to a compare of the
10811 first character of the string vs zero. To wit,
10812 strlen(ptr) == 0 => *ptr == 0
10813 strlen(ptr) != 0 => *ptr != 0
10814 Other cases should reduce to one of these two (or a constant)
10815 due to the return value of strlen being unsigned. */
10816 if (TREE_CODE (arg0) == CALL_EXPR
10817 && integer_zerop (arg1))
10819 tree fndecl = get_callee_fndecl (arg0);
10820 tree arglist;
10822 if (fndecl
10823 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10824 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10825 && (arglist = TREE_OPERAND (arg0, 1))
10826 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10827 && ! TREE_CHAIN (arglist))
10829 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10830 return fold_build2 (code, type, iref,
10831 build_int_cst (TREE_TYPE (iref), 0));
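/* For example, "strlen (p) == 0" folds to "*p == 0", avoiding the
   library call when only emptiness is being tested.  */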
10835 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10836 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10837 if (TREE_CODE (arg0) == RSHIFT_EXPR
10838 && integer_zerop (arg1)
10839 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10841 tree arg00 = TREE_OPERAND (arg0, 0);
10842 tree arg01 = TREE_OPERAND (arg0, 1);
10843 tree itype = TREE_TYPE (arg00);
10844 if (TREE_INT_CST_HIGH (arg01) == 0
10845 && TREE_INT_CST_LOW (arg01)
10846 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10848 if (TYPE_UNSIGNED (itype))
10850 itype = lang_hooks.types.signed_type (itype);
10851 arg00 = fold_convert (itype, arg00);
10853 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10854 type, arg00, build_int_cst (itype, 0));
10858 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10859 if (integer_zerop (arg1)
10860 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10861 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10862 TREE_OPERAND (arg0, 1));
10864 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10865 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10866 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10867 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10868 build_int_cst (TREE_TYPE (arg1), 0));
10869 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10870 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10871 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10872 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10873 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10874 build_int_cst (TREE_TYPE (arg1), 0));
10876 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10877 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10878 && TREE_CODE (arg1) == INTEGER_CST
10879 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10880 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10881 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10882 TREE_OPERAND (arg0, 1), arg1));
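/* For example, "(X ^ 4) == 6" folds to "X == 2", since 4 ^ 6 == 2.  */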
10884 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10885 (X & C) == 0 when C is a single bit. */
10886 if (TREE_CODE (arg0) == BIT_AND_EXPR
10887 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10888 && integer_zerop (arg1)
10889 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10891 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10892 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10893 TREE_OPERAND (arg0, 1));
10894 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10895 type, tem, arg1);
10898 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10899 constant C is a power of two, i.e. a single bit. */
10900 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10901 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10902 && integer_zerop (arg1)
10903 && integer_pow2p (TREE_OPERAND (arg0, 1))
10904 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10905 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10907 tree arg00 = TREE_OPERAND (arg0, 0);
10908 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10909 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10912 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10913 when C is a power of two, i.e. a single bit. */
10914 if (TREE_CODE (arg0) == BIT_AND_EXPR
10915 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10916 && integer_zerop (arg1)
10917 && integer_pow2p (TREE_OPERAND (arg0, 1))
10918 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10919 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10921 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10922 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10923 arg000, TREE_OPERAND (arg0, 1));
10924 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10925 tem, build_int_cst (TREE_TYPE (tem), 0));
10928 if (integer_zerop (arg1)
10929 && tree_expr_nonzero_p (arg0))
10931 tree res = constant_boolean_node (code == NE_EXPR, type);
10932 return omit_one_operand (type, res, arg0);
10934 return NULL_TREE;
10936 case LT_EXPR:
10937 case GT_EXPR:
10938 case LE_EXPR:
10939 case GE_EXPR:
10940 tem = fold_comparison (code, type, op0, op1);
10941 if (tem != NULL_TREE)
10942 return tem;
10944 /* Transform comparisons of the form X +- C CMP X. */
10945 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10946 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10947 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10948 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10949 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10950 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10952 tree arg01 = TREE_OPERAND (arg0, 1);
10953 enum tree_code code0 = TREE_CODE (arg0);
10954 int is_positive;
10956 if (TREE_CODE (arg01) == REAL_CST)
10957 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10958 else
10959 is_positive = tree_int_cst_sgn (arg01);
10961 /* (X - c) > X becomes false. */
10962 if (code == GT_EXPR
10963 && ((code0 == MINUS_EXPR && is_positive >= 0)
10964 || (code0 == PLUS_EXPR && is_positive <= 0)))
10966 if (TREE_CODE (arg01) == INTEGER_CST
10967 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10968 fold_overflow_warning (("assuming signed overflow does not "
10969 "occur when assuming that (X - c) > X "
10970 "is always false"),
10971 WARN_STRICT_OVERFLOW_ALL);
10972 return constant_boolean_node (0, type);
10975 /* Likewise (X + c) < X becomes false. */
10976 if (code == LT_EXPR
10977 && ((code0 == PLUS_EXPR && is_positive >= 0)
10978 || (code0 == MINUS_EXPR && is_positive <= 0)))
10980 if (TREE_CODE (arg01) == INTEGER_CST
10981 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10982 fold_overflow_warning (("assuming signed overflow does not "
10983 "occur when assuming that "
10984 "(X + c) < X is always false"),
10985 WARN_STRICT_OVERFLOW_ALL);
10986 return constant_boolean_node (0, type);
10989 /* Convert (X - c) <= X to true. */
10990 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10991 && code == LE_EXPR
10992 && ((code0 == MINUS_EXPR && is_positive >= 0)
10993 || (code0 == PLUS_EXPR && is_positive <= 0)))
10995 if (TREE_CODE (arg01) == INTEGER_CST
10996 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10997 fold_overflow_warning (("assuming signed overflow does not "
10998 "occur when assuming that "
10999 "(X - c) <= X is always true"),
11000 WARN_STRICT_OVERFLOW_ALL);
11001 return constant_boolean_node (1, type);
11004 /* Convert (X + c) >= X to true. */
11005 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11006 && code == GE_EXPR
11007 && ((code0 == PLUS_EXPR && is_positive >= 0)
11008 || (code0 == MINUS_EXPR && is_positive <= 0)))
11010 if (TREE_CODE (arg01) == INTEGER_CST
11011 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11012 fold_overflow_warning (("assuming signed overflow does not "
11013 "occur when assuming that "
11014 "(X + c) >= X is always true"),
11015 WARN_STRICT_OVERFLOW_ALL);
11016 return constant_boolean_node (1, type);
11019 if (TREE_CODE (arg01) == INTEGER_CST)
11021 /* Convert X + c > X and X - c < X to true for integers. */
11022 if (code == GT_EXPR
11023 && ((code0 == PLUS_EXPR && is_positive > 0)
11024 || (code0 == MINUS_EXPR && is_positive < 0)))
11026 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11027 fold_overflow_warning (("assuming signed overflow does "
11028 "not occur when assuming that "
11029 "(X + c) > X is always true"),
11030 WARN_STRICT_OVERFLOW_ALL);
11031 return constant_boolean_node (1, type);
11034 if (code == LT_EXPR
11035 && ((code0 == MINUS_EXPR && is_positive > 0)
11036 || (code0 == PLUS_EXPR && is_positive < 0)))
11038 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11039 fold_overflow_warning (("assuming signed overflow does "
11040 "not occur when assuming that "
11041 "(X - c) < X is always true"),
11042 WARN_STRICT_OVERFLOW_ALL);
11043 return constant_boolean_node (1, type);
11046 /* Convert X + c <= X and X - c >= X to false for integers. */
11047 if (code == LE_EXPR
11048 && ((code0 == PLUS_EXPR && is_positive > 0)
11049 || (code0 == MINUS_EXPR && is_positive < 0)))
11051 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11052 fold_overflow_warning (("assuming signed overflow does "
11053 "not occur when assuming that "
11054 "(X + c) <= X is always false"),
11055 WARN_STRICT_OVERFLOW_ALL);
11056 return constant_boolean_node (0, type);
11059 if (code == GE_EXPR
11060 && ((code0 == MINUS_EXPR && is_positive > 0)
11061 || (code0 == PLUS_EXPR && is_positive < 0)))
11063 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11064 fold_overflow_warning (("assuming signed overflow does "
11065 "not occur when assuming that "
11066 "(X - c) >= X is always true"),
11067 WARN_STRICT_OVERFLOW_ALL);
11068 return constant_boolean_node (0, type);
11073 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11074 This transformation affects the cases which are handled in later
11075 optimizations involving comparisons with non-negative constants. */
11076 if (TREE_CODE (arg1) == INTEGER_CST
11077 && TREE_CODE (arg0) != INTEGER_CST
11078 && tree_int_cst_sgn (arg1) > 0)
11080 if (code == GE_EXPR)
11082 arg1 = const_binop (MINUS_EXPR, arg1,
11083 build_int_cst (TREE_TYPE (arg1), 1), 0);
11084 return fold_build2 (GT_EXPR, type, arg0,
11085 fold_convert (TREE_TYPE (arg0), arg1));
11087 if (code == LT_EXPR)
11089 arg1 = const_binop (MINUS_EXPR, arg1,
11090 build_int_cst (TREE_TYPE (arg1), 1), 0);
11091 return fold_build2 (LE_EXPR, type, arg0,
11092 fold_convert (TREE_TYPE (arg0), arg1));
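/* For example, "X >= 5" becomes "X > 4" and "X < 5" becomes
   "X <= 4", so the extreme-value checks below only have to
   recognize the canonical forms.  */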
11096 /* Comparisons with the highest or lowest possible integer of
11097 the specified size will have known values. */
11099 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11101 if (TREE_CODE (arg1) == INTEGER_CST
11102 && ! TREE_CONSTANT_OVERFLOW (arg1)
11103 && width <= 2 * HOST_BITS_PER_WIDE_INT
11104 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11105 || POINTER_TYPE_P (TREE_TYPE (arg1))))
11107 HOST_WIDE_INT signed_max_hi;
11108 unsigned HOST_WIDE_INT signed_max_lo;
11109 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11111 if (width <= HOST_BITS_PER_WIDE_INT)
11113 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11114 - 1;
11115 signed_max_hi = 0;
11116 max_hi = 0;
11118 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11120 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11121 min_lo = 0;
11122 min_hi = 0;
11124 else
11126 max_lo = signed_max_lo;
11127 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11128 min_hi = -1;
11131 else
11133 width -= HOST_BITS_PER_WIDE_INT;
11134 signed_max_lo = -1;
11135 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11136 - 1;
11137 max_lo = -1;
11138 min_lo = 0;
11140 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11142 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11143 min_hi = 0;
11145 else
11147 max_hi = signed_max_hi;
11148 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11152 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11153 && TREE_INT_CST_LOW (arg1) == max_lo)
11154 switch (code)
11156 case GT_EXPR:
11157 return omit_one_operand (type, integer_zero_node, arg0);
11159 case GE_EXPR:
11160 return fold_build2 (EQ_EXPR, type, op0, op1);
11162 case LE_EXPR:
11163 return omit_one_operand (type, integer_one_node, arg0);
11165 case LT_EXPR:
11166 return fold_build2 (NE_EXPR, type, op0, op1);
11168 /* The GE_EXPR and LT_EXPR cases above are not normally
11169 reached because of previous transformations. */
11171 default:
11172 break;
11174 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11176 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11177 switch (code)
11179 case GT_EXPR:
11180 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11181 return fold_build2 (EQ_EXPR, type,
11182 fold_convert (TREE_TYPE (arg1), arg0),
11183 arg1);
11184 case LE_EXPR:
11185 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11186 return fold_build2 (NE_EXPR, type,
11187 fold_convert (TREE_TYPE (arg1), arg0),
11188 arg1);
11190 default:
11191 break;
11192 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == min_hi
11194 && TREE_INT_CST_LOW (arg1) == min_lo)
11195 switch (code)
11197 case LT_EXPR:
11198 return omit_one_operand (type, integer_zero_node, arg0);
11200 case LE_EXPR:
11201 return fold_build2 (EQ_EXPR, type, op0, op1);
11203 case GE_EXPR:
11204 return omit_one_operand (type, integer_one_node, arg0);
11206 case GT_EXPR:
11207 return fold_build2 (NE_EXPR, type, op0, op1);
11209 default:
11210 break;
11212 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == min_hi
11214 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11215 switch (code)
11217 case GE_EXPR:
11218 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11219 return fold_build2 (NE_EXPR, type,
11220 fold_convert (TREE_TYPE (arg1), arg0),
11221 arg1);
11222 case LT_EXPR:
11223 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11224 return fold_build2 (EQ_EXPR, type,
11225 fold_convert (TREE_TYPE (arg1), arg0),
11226 arg1);
11228 default:
11229 break;
11231 else if (!in_gimple_form
11232 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
11233 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11234 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11235 /* signed_type does not work on pointer types. */
11236 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11238 /* The following case also applies to X < signed_max+1
11239 and X >= signed_max+1 because of previous transformations. */
11240 if (code == LE_EXPR || code == GT_EXPR)
11242 tree st;
11243 st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11244 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11245 type, fold_convert (st, arg0),
11246 build_int_cst (st, 0));
11252 /* If we are comparing an ABS_EXPR with a constant, we can
11253 convert all the cases into explicit comparisons, but they may
11254 well not be faster than doing the ABS and one comparison.
11255 But ABS (X) <= C is a range comparison, which becomes a subtraction
11256 and a comparison, and is probably faster. */
11257 if (code == LE_EXPR
11258 && TREE_CODE (arg1) == INTEGER_CST
11259 && TREE_CODE (arg0) == ABS_EXPR
11260 && ! TREE_SIDE_EFFECTS (arg0)
11261 && (0 != (tem = negate_expr (arg1)))
11262 && TREE_CODE (tem) == INTEGER_CST
11263 && ! TREE_CONSTANT_OVERFLOW (tem))
11264 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11265 build2 (GE_EXPR, type,
11266 TREE_OPERAND (arg0, 0), tem),
11267 build2 (LE_EXPR, type,
11268 TREE_OPERAND (arg0, 0), arg1));
11270 /* Convert ABS_EXPR<x> >= 0 to true. */
11271 strict_overflow_p = false;
11272 if (code == GE_EXPR
11273 && (integer_zerop (arg1)
11274 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11275 && real_zerop (arg1)))
11276 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11278 if (strict_overflow_p)
11279 fold_overflow_warning (("assuming signed overflow does not occur "
11280 "when simplifying comparison of "
11281 "absolute value and zero"),
11282 WARN_STRICT_OVERFLOW_CONDITIONAL);
11283 return omit_one_operand (type, integer_one_node, arg0);
11286 /* Convert ABS_EXPR<x> < 0 to false. */
11287 strict_overflow_p = false;
11288 if (code == LT_EXPR
11289 && (integer_zerop (arg1) || real_zerop (arg1))
11290 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11292 if (strict_overflow_p)
11293 fold_overflow_warning (("assuming signed overflow does not occur "
11294 "when simplifying comparison of "
11295 "absolute value and zero"),
11296 WARN_STRICT_OVERFLOW_CONDITIONAL);
11297 return omit_one_operand (type, integer_zero_node, arg0);
11300 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11301 and similarly for >= into !=. */
11302 if ((code == LT_EXPR || code == GE_EXPR)
11303 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11304 && TREE_CODE (arg1) == LSHIFT_EXPR
11305 && integer_onep (TREE_OPERAND (arg1, 0)))
11306 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11307 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11308 TREE_OPERAND (arg1, 1)),
11309 build_int_cst (TREE_TYPE (arg0), 0));
11311 if ((code == LT_EXPR || code == GE_EXPR)
11312 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11313 && (TREE_CODE (arg1) == NOP_EXPR
11314 || TREE_CODE (arg1) == CONVERT_EXPR)
11315 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11316 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11317 return
11318 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11319 fold_convert (TREE_TYPE (arg0),
11320 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11321 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11322 1)),
11323 build_int_cst (TREE_TYPE (arg0), 0));
11325 return NULL_TREE;
11327 case UNORDERED_EXPR:
11328 case ORDERED_EXPR:
11329 case UNLT_EXPR:
11330 case UNLE_EXPR:
11331 case UNGT_EXPR:
11332 case UNGE_EXPR:
11333 case UNEQ_EXPR:
11334 case LTGT_EXPR:
11335 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11337 t1 = fold_relational_const (code, type, arg0, arg1);
11338 if (t1 != NULL_TREE)
11339 return t1;
11342 /* If the first operand is NaN, the result is constant. */
11343 if (TREE_CODE (arg0) == REAL_CST
11344 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11345 && (code != LTGT_EXPR || ! flag_trapping_math))
11347 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11348 ? integer_zero_node
11349 : integer_one_node;
11350 return omit_one_operand (type, t1, arg1);
11353 /* If the second operand is NaN, the result is constant. */
11354 if (TREE_CODE (arg1) == REAL_CST
11355 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11356 && (code != LTGT_EXPR || ! flag_trapping_math))
11358 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11359 ? integer_zero_node
11360 : integer_one_node;
11361 return omit_one_operand (type, t1, arg0);
11364 /* Simplify unordered comparison of something with itself. */
11365 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11366 && operand_equal_p (arg0, arg1, 0))
11367 return constant_boolean_node (1, type);
11369 if (code == LTGT_EXPR
11370 && !flag_trapping_math
11371 && operand_equal_p (arg0, arg1, 0))
11372 return constant_boolean_node (0, type);
11374 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11376 tree targ0 = strip_float_extensions (arg0);
11377 tree targ1 = strip_float_extensions (arg1);
11378 tree newtype = TREE_TYPE (targ0);
11380 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11381 newtype = TREE_TYPE (targ1);
11383 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11384 return fold_build2 (code, type, fold_convert (newtype, targ0),
11385 fold_convert (newtype, targ1));
11388 return NULL_TREE;
11390 case COMPOUND_EXPR:
11391 /* When pedantic, a compound expression can be neither an lvalue
11392 nor an integer constant expression. */
11393 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11395 /* Don't let (0, 0) be null pointer constant. */
11396 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11397 : fold_convert (type, arg1);
11398 return pedantic_non_lvalue (tem);
11400 case COMPLEX_EXPR:
11401 if ((TREE_CODE (arg0) == REAL_CST
11402 && TREE_CODE (arg1) == REAL_CST)
11403 || (TREE_CODE (arg0) == INTEGER_CST
11404 && TREE_CODE (arg1) == INTEGER_CST))
11405 return build_complex (type, arg0, arg1);
11406 return NULL_TREE;
11408 case ASSERT_EXPR:
11409 /* An ASSERT_EXPR should never be passed to fold_binary. */
11410 gcc_unreachable ();
11412 default:
11413 return NULL_TREE;
11414 } /* switch (code) */
11417 /* Callback for walk_tree, looking for LABEL_EXPR.
11418 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11419 Do not check the sub-tree of GOTO_EXPR. */
11421 static tree
11422 contains_label_1 (tree *tp,
11423 int *walk_subtrees,
11424 void *data ATTRIBUTE_UNUSED)
11426 switch (TREE_CODE (*tp))
11428 case LABEL_EXPR:
11429 return *tp;
11430 case GOTO_EXPR:
11431 *walk_subtrees = 0;
11433 default:
11434 return NULL_TREE;
11438 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11439 accessible from outside the sub-tree. Returns true if such a label
11440 is found, false otherwise. */
11442 static bool
11443 contains_label_p (tree st)
11445 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11448 /* Fold a ternary expression of code CODE and type TYPE with operands
11449 OP0, OP1, and OP2. Return the folded expression if folding is
11450 successful. Otherwise, return NULL_TREE. */
11452 tree
11453 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11455 tree tem;
11456 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11457 enum tree_code_class kind = TREE_CODE_CLASS (code);
11459 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11460 && TREE_CODE_LENGTH (code) == 3);
11462 /* Strip any conversions that don't change the mode. This is safe
11463 for every expression, except for a comparison expression because
11464 its signedness is derived from its operands. So, in the latter
11465 case, only strip conversions that don't change the signedness.
11467 Note that this is done as an internal manipulation within the
11468 constant folder, in order to find the simplest representation of
11469 the arguments so that their form can be studied. In any case,
11470 the appropriate type conversions should be put back in the tree
11471 that will get out of the constant folder. */
11473 if (op0)
11475 arg0 = op0;
11476 STRIP_NOPS (arg0);
11479 if (op1)
11481 arg1 = op1;
11482 STRIP_NOPS (arg1);
11485 switch (code)
11486 case COMPONENT_REF:
11487 if (TREE_CODE (arg0) == CONSTRUCTOR
11488 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11490 unsigned HOST_WIDE_INT idx;
11491 tree field, value;
11492 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11493 if (field == arg1)
11494 return value;
11496 return NULL_TREE;
11498 case COND_EXPR:
11499 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11500 so all simple results must be passed through pedantic_non_lvalue. */
11501 if (TREE_CODE (arg0) == INTEGER_CST)
11503 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11504 tem = integer_zerop (arg0) ? op2 : op1;
11505 /* Only optimize constant conditions when the selected branch
11506 has the same type as the COND_EXPR. This avoids optimizing
11507 away "c ? x : throw", where the throw has a void type.
11508 Avoid throwing away the operand which contains a label. */
11509 if ((!TREE_SIDE_EFFECTS (unused_op)
11510 || !contains_label_p (unused_op))
11511 && (! VOID_TYPE_P (TREE_TYPE (tem))
11512 || VOID_TYPE_P (type)))
11513 return pedantic_non_lvalue (tem);
11516 if (operand_equal_p (arg1, op2, 0))
11517 return pedantic_omit_one_operand (type, arg1, arg0);
11519 /* If we have A op B ? A : C, we may be able to convert this to a
11520 simpler expression, depending on the operation and the values
11521 of B and C. Signed zeros prevent all of these transformations,
11522 for reasons given above each one.
11524 Also try swapping the arguments and inverting the conditional. */
11525 if (COMPARISON_CLASS_P (arg0)
11526 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11527 arg1, TREE_OPERAND (arg0, 1))
11528 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11530 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11531 if (tem)
11532 return tem;
11535 if (COMPARISON_CLASS_P (arg0)
11536 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11537 op2,
11538 TREE_OPERAND (arg0, 1))
11539 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11541 tem = fold_truth_not_expr (arg0);
11542 if (tem && COMPARISON_CLASS_P (tem))
11544 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11545 if (tem)
11546 return tem;
11550 /* If the second operand is simpler than the third, swap them
11551 since that produces better jump optimization results. */
11552 if (truth_value_p (TREE_CODE (arg0))
11553 && tree_swap_operands_p (op1, op2, false))
11555 /* See if this can be inverted. If it can't, possibly because
11556 it was a floating-point inequality comparison, don't do
11557 anything. */
11558 tem = fold_truth_not_expr (arg0);
11559 if (tem)
11560 return fold_build3 (code, type, tem, op2, op1);
11563 /* Convert A ? 1 : 0 to simply A. */
11564 if (integer_onep (op1)
11565 && integer_zerop (op2)
11566 /* If we try to convert OP0 to our type, the
11567 call to fold will try to move the conversion inside
11568 a COND, which will recurse. In that case, the COND_EXPR
11569 is probably the best choice, so leave it alone. */
11570 && type == TREE_TYPE (arg0))
11571 return pedantic_non_lvalue (arg0);
11573 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11574 over COND_EXPR in cases such as floating point comparisons. */
11575 if (integer_zerop (op1)
11576 && integer_onep (op2)
11577 && truth_value_p (TREE_CODE (arg0)))
11578 return pedantic_non_lvalue (fold_convert (type,
11579 invert_truthvalue (arg0)));
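/* For example, "(X > Y) ? 0 : 1" folds to "!(X > Y)", which
   invert_truthvalue may simplify further when the comparison can
   be inverted safely.  */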
11581 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11582 if (TREE_CODE (arg0) == LT_EXPR
11583 && integer_zerop (TREE_OPERAND (arg0, 1))
11584 && integer_zerop (op2)
11585 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11587 /* sign_bit_p only checks ARG1 bits within A's precision.
11588 If <sign bit of A> has wider type than A, bits outside
11589 of A's precision in <sign bit of A> need to be checked.
11590 If they are all 0, this optimization needs to be done
11591 in unsigned A's type, if they are all 1 in signed A's type,
11592 otherwise this can't be done. */
11593 if (TYPE_PRECISION (TREE_TYPE (tem))
11594 < TYPE_PRECISION (TREE_TYPE (arg1))
11595 && TYPE_PRECISION (TREE_TYPE (tem))
11596 < TYPE_PRECISION (type))
11598 unsigned HOST_WIDE_INT mask_lo;
11599 HOST_WIDE_INT mask_hi;
11600 int inner_width, outer_width;
11601 tree tem_type;
11603 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11604 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11605 if (outer_width > TYPE_PRECISION (type))
11606 outer_width = TYPE_PRECISION (type);
11608 if (outer_width > HOST_BITS_PER_WIDE_INT)
11610 mask_hi = ((unsigned HOST_WIDE_INT) -1
11611 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11612 mask_lo = -1;
11614 else
11616 mask_hi = 0;
11617 mask_lo = ((unsigned HOST_WIDE_INT) -1
11618 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11620 if (inner_width > HOST_BITS_PER_WIDE_INT)
11622 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11623 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11624 mask_lo = 0;
11626 else
11627 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11628 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11630 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11631 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11633 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11634 tem = fold_convert (tem_type, tem);
11636 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11637 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11639 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11640 tem = fold_convert (tem_type, tem);
11642 else
11643 tem = NULL;
11646 if (tem)
11647 return fold_convert (type,
11648 fold_build2 (BIT_AND_EXPR,
11649 TREE_TYPE (tem), tem,
11650 fold_convert (TREE_TYPE (tem),
11651 arg1)));
11654 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11655 already handled above. */
11656 if (TREE_CODE (arg0) == BIT_AND_EXPR
11657 && integer_onep (TREE_OPERAND (arg0, 1))
11658 && integer_zerop (op2)
11659 && integer_pow2p (arg1))
11661 tree tem = TREE_OPERAND (arg0, 0);
11663 if (TREE_CODE (tem) == RSHIFT_EXPR
11664 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11665 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11666 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11667 return fold_build2 (BIT_AND_EXPR, type,
11668 TREE_OPERAND (tem, 0), arg1);
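/* For example, "((X >> 3) & 1) ? 8 : 0" folds to "X & 8", since
   tree_log2 (8) matches the shift count 3.  */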
11671 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11672 is probably obsolete because the first operand should be a
11673 truth value (that's why we have the two cases above), but let's
11674 leave it in until we can confirm this for all front-ends. */
11675 if (integer_zerop (op2)
11676 && TREE_CODE (arg0) == NE_EXPR
11677 && integer_zerop (TREE_OPERAND (arg0, 1))
11678 && integer_pow2p (arg1)
11679 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11680 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11681 arg1, OEP_ONLY_CONST))
11682 return pedantic_non_lvalue (fold_convert (type,
11683 TREE_OPERAND (arg0, 0)));
11685 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11686 if (integer_zerop (op2)
11687 && truth_value_p (TREE_CODE (arg0))
11688 && truth_value_p (TREE_CODE (arg1)))
11689 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11690 fold_convert (type, arg0),
11691 arg1);
11693 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11694 if (integer_onep (op2)
11695 && truth_value_p (TREE_CODE (arg0))
11696 && truth_value_p (TREE_CODE (arg1)))
11698 /* Only perform transformation if ARG0 is easily inverted. */
11699 tem = fold_truth_not_expr (arg0);
11700 if (tem)
11701 return fold_build2 (TRUTH_ORIF_EXPR, type,
11702 fold_convert (type, tem),
11703 arg1);
11706 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11707 if (integer_zerop (arg1)
11708 && truth_value_p (TREE_CODE (arg0))
11709 && truth_value_p (TREE_CODE (op2)))
11711 /* Only perform transformation if ARG0 is easily inverted. */
11712 tem = fold_truth_not_expr (arg0);
11714 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11715 fold_convert (type, tem),
11719 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11720 if (integer_onep (arg1)
11721 && truth_value_p (TREE_CODE (arg0))
11722 && truth_value_p (TREE_CODE (op2)))
11723 return fold_build2 (TRUTH_ORIF_EXPR, type,
11724 fold_convert (type, arg0),
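/* [Editorial sketch -- hypothetical, not GCC code.]  The four
   truth-value conversions above, written out for C values restricted to
   0 and 1; returns 1 for all such A and B:  */
static int
demo_truth_conversions (int a, int b)
{
  return ((a ? b : 0) == (a && b))      /* A ? B : 0  =>  A && B  */
      && ((a ? b : 1) == (!a || b))     /* A ? B : 1  =>  !A || B */
      && ((a ? 0 : b) == (!a && b))     /* A ? 0 : B  =>  !A && B */
      && ((a ? 1 : b) == (a || b));     /* A ? 1 : B  =>  A || B  */
}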
11730 /* Check for a built-in function. */
11731 if (TREE_CODE (op0) == ADDR_EXPR
11732 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11733 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11734 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11737 case BIT_FIELD_REF:
11738 if (TREE_CODE (arg0) == VECTOR_CST
11739 && type == TREE_TYPE (TREE_TYPE (arg0))
11740 && host_integerp (arg1, 1)
11741 && host_integerp (op2, 1))
11743 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11744 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11747 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11748 && (idx % width) == 0
11749 && (idx = idx / width)
11750 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11752 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11753 while (idx-- > 0 && elements)
11754 elements = TREE_CHAIN (elements);
11756 return TREE_VALUE (elements);
11758 return fold_convert (type, integer_zero_node);
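/* [Editorial sketch -- hypothetical, not GCC code.]  The index
   arithmetic used above, in plain C: a WIDTH-bit read at bit offset IDX
   from WIDTH-bit lanes selects lane IDX / WIDTH, provided IDX is
   lane-aligned and in range; otherwise fold declines to simplify:  */
static int
demo_vector_lane (const int lanes[4], unsigned idx_bits)
{
  unsigned width = 8u * (unsigned) sizeof (int);   /* bits per lane */
  if (idx_bits % width != 0 || idx_bits / width >= 4u)
    return 0;                                      /* no simplification */
  return lanes[idx_bits / width];
}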
11765 } /* switch (code) */
11768 /* Perform constant folding and related simplification of EXPR.
11769 The related simplifications include x*1 => x, x*0 => 0, etc.,
11770 and application of the associative law.
11771 NOP_EXPR conversions may be removed freely (as long as we
11772 are careful not to change the type of the overall expression).
11773 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11774 but we can constant-fold them if they have constant operands. */
11776 #ifdef ENABLE_FOLD_CHECKING
11777 # define fold(x) fold_1 (x)
11778 static tree fold_1 (tree);
11784 const tree t = expr;
11785 enum tree_code code = TREE_CODE (t);
11786 enum tree_code_class kind = TREE_CODE_CLASS (code);
11789 /* Return right away if a constant. */
11790 if (kind == tcc_constant)
11793 if (IS_EXPR_CODE_CLASS (kind))
11795 tree type = TREE_TYPE (t);
11796 tree op0, op1, op2;
11798 switch (TREE_CODE_LENGTH (code))
11801 op0 = TREE_OPERAND (t, 0);
11802 tem = fold_unary (code, type, op0);
11803 return tem ? tem : expr;
11805 op0 = TREE_OPERAND (t, 0);
11806 op1 = TREE_OPERAND (t, 1);
11807 tem = fold_binary (code, type, op0, op1);
11808 return tem ? tem : expr;
11810 op0 = TREE_OPERAND (t, 0);
11811 op1 = TREE_OPERAND (t, 1);
11812 op2 = TREE_OPERAND (t, 2);
11813 tem = fold_ternary (code, type, op0, op1, op2);
11814 return tem ? tem : expr;
11823 return fold (DECL_INITIAL (t));
11827 } /* switch (code) */
11830 #ifdef ENABLE_FOLD_CHECKING
11833 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11834 static void fold_check_failed (tree, tree);
11835 void print_fold_checksum (tree);
11837 /* When --enable-checking=fold, compute a digest of EXPR before
11838 and after the actual fold call, to verify that fold did not
11839 accidentally change the original EXPR. */
11845 struct md5_ctx ctx;
11846 unsigned char checksum_before[16], checksum_after[16];
11849 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11850 md5_init_ctx (&ctx);
11851 fold_checksum_tree (expr, &ctx, ht);
11852 md5_finish_ctx (&ctx, checksum_before);
11855 ret = fold_1 (expr);
11857 md5_init_ctx (&ctx);
11858 fold_checksum_tree (expr, &ctx, ht);
11859 md5_finish_ctx (&ctx, checksum_after);
11862 if (memcmp (checksum_before, checksum_after, 16))
11863 fold_check_failed (expr, ret);
11869 print_fold_checksum (tree expr)
11871 struct md5_ctx ctx;
11872 unsigned char checksum[16], cnt;
11875 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11876 md5_init_ctx (&ctx);
11877 fold_checksum_tree (expr, &ctx, ht);
11878 md5_finish_ctx (&ctx, checksum);
11880 for (cnt = 0; cnt < 16; ++cnt)
11881 fprintf (stderr, "%02x", checksum[cnt]);
11882 putc ('\n', stderr);
11886 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11888 internal_error ("fold check: original tree changed by fold");
11892 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11895 enum tree_code code;
11896 struct tree_function_decl buf;
11901 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11902 <= sizeof (struct tree_function_decl))
11903 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11906 slot = htab_find_slot (ht, expr, INSERT);
11910 code = TREE_CODE (expr);
11911 if (TREE_CODE_CLASS (code) == tcc_declaration
11912 && DECL_ASSEMBLER_NAME_SET_P (expr))
11914 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11915 memcpy ((char *) &buf, expr, tree_size (expr));
11916 expr = (tree) &buf;
11917 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11919 else if (TREE_CODE_CLASS (code) == tcc_type
11920 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11921 || TYPE_CACHED_VALUES_P (expr)
11922 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11924 /* Allow these fields to be modified. */
11925 memcpy ((char *) &buf, expr, tree_size (expr));
11926 expr = (tree) &buf;
11927 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11928 TYPE_POINTER_TO (expr) = NULL;
11929 TYPE_REFERENCE_TO (expr) = NULL;
11930 if (TYPE_CACHED_VALUES_P (expr))
11932 TYPE_CACHED_VALUES_P (expr) = 0;
11933 TYPE_CACHED_VALUES (expr) = NULL;
11936 md5_process_bytes (expr, tree_size (expr), ctx);
11937 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11938 if (TREE_CODE_CLASS (code) != tcc_type
11939 && TREE_CODE_CLASS (code) != tcc_declaration
11940 && code != TREE_LIST)
11941 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11942 switch (TREE_CODE_CLASS (code))
11948 md5_process_bytes (TREE_STRING_POINTER (expr),
11949 TREE_STRING_LENGTH (expr), ctx);
11952 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11953 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11956 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11962 case tcc_exceptional:
11966 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11967 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11968 expr = TREE_CHAIN (expr);
11969 goto recursive_label;
11972 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11973 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11979 case tcc_expression:
11980 case tcc_reference:
11981 case tcc_comparison:
11984 case tcc_statement:
11985 len = TREE_CODE_LENGTH (code);
11986 for (i = 0; i < len; ++i)
11987 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11989 case tcc_declaration:
11990 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11991 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11992 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11994 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11995 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11996 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11997 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11998 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12000 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12001 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12003 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12005 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12006 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12007 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12011 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12012 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12013 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12014 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12015 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12016 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12017 if (INTEGRAL_TYPE_P (expr)
12018 || SCALAR_FLOAT_TYPE_P (expr))
12020 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12021 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12023 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12024 if (TREE_CODE (expr) == RECORD_TYPE
12025 || TREE_CODE (expr) == UNION_TYPE
12026 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12027 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12028 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12037 /* Fold a unary tree expression with code CODE of type TYPE with an
12038 operand OP0. Return a folded expression if successful. Otherwise,
12039 return a tree expression with code CODE of type TYPE with an
12040 operand OP0. */
12043 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12046 #ifdef ENABLE_FOLD_CHECKING
12047 unsigned char checksum_before[16], checksum_after[16];
12048 struct md5_ctx ctx;
12051 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12052 md5_init_ctx (&ctx);
12053 fold_checksum_tree (op0, &ctx, ht);
12054 md5_finish_ctx (&ctx, checksum_before);
12058 tem = fold_unary (code, type, op0);
12060 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12062 #ifdef ENABLE_FOLD_CHECKING
12063 md5_init_ctx (&ctx);
12064 fold_checksum_tree (op0, &ctx, ht);
12065 md5_finish_ctx (&ctx, checksum_after);
12068 if (memcmp (checksum_before, checksum_after, 16))
12069 fold_check_failed (op0, tem);
12074 /* Fold a binary tree expression with code CODE of type TYPE with
12075 operands OP0 and OP1. Return a folded expression if successful.
12076 Otherwise, return a tree expression with code CODE of type TYPE
12077 with operands OP0 and OP1. */
12080 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12084 #ifdef ENABLE_FOLD_CHECKING
12085 unsigned char checksum_before_op0[16],
12086 checksum_before_op1[16],
12087 checksum_after_op0[16],
12088 checksum_after_op1[16];
12089 struct md5_ctx ctx;
12092 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12093 md5_init_ctx (&ctx);
12094 fold_checksum_tree (op0, &ctx, ht);
12095 md5_finish_ctx (&ctx, checksum_before_op0);
12098 md5_init_ctx (&ctx);
12099 fold_checksum_tree (op1, &ctx, ht);
12100 md5_finish_ctx (&ctx, checksum_before_op1);
12104 tem = fold_binary (code, type, op0, op1);
12106 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12108 #ifdef ENABLE_FOLD_CHECKING
12109 md5_init_ctx (&ctx);
12110 fold_checksum_tree (op0, &ctx, ht);
12111 md5_finish_ctx (&ctx, checksum_after_op0);
12114 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12115 fold_check_failed (op0, tem);
12117 md5_init_ctx (&ctx);
12118 fold_checksum_tree (op1, &ctx, ht);
12119 md5_finish_ctx (&ctx, checksum_after_op1);
12122 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12123 fold_check_failed (op1, tem);
12128 /* Fold a ternary tree expression with code CODE of type TYPE with
12129 operands OP0, OP1, and OP2. Return a folded expression if
12130 successful. Otherwise, return a tree expression with code CODE of
12131 type TYPE with operands OP0, OP1, and OP2. */
12134 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12138 #ifdef ENABLE_FOLD_CHECKING
12139 unsigned char checksum_before_op0[16],
12140 checksum_before_op1[16],
12141 checksum_before_op2[16],
12142 checksum_after_op0[16],
12143 checksum_after_op1[16],
12144 checksum_after_op2[16];
12145 struct md5_ctx ctx;
12148 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12149 md5_init_ctx (&ctx);
12150 fold_checksum_tree (op0, &ctx, ht);
12151 md5_finish_ctx (&ctx, checksum_before_op0);
12154 md5_init_ctx (&ctx);
12155 fold_checksum_tree (op1, &ctx, ht);
12156 md5_finish_ctx (&ctx, checksum_before_op1);
12159 md5_init_ctx (&ctx);
12160 fold_checksum_tree (op2, &ctx, ht);
12161 md5_finish_ctx (&ctx, checksum_before_op2);
12165 tem = fold_ternary (code, type, op0, op1, op2);
12167 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12169 #ifdef ENABLE_FOLD_CHECKING
12170 md5_init_ctx (&ctx);
12171 fold_checksum_tree (op0, &ctx, ht);
12172 md5_finish_ctx (&ctx, checksum_after_op0);
12175 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12176 fold_check_failed (op0, tem);
12178 md5_init_ctx (&ctx);
12179 fold_checksum_tree (op1, &ctx, ht);
12180 md5_finish_ctx (&ctx, checksum_after_op1);
12183 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12184 fold_check_failed (op1, tem);
12186 md5_init_ctx (&ctx);
12187 fold_checksum_tree (op2, &ctx, ht);
12188 md5_finish_ctx (&ctx, checksum_after_op2);
12191 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12192 fold_check_failed (op2, tem);
12197 /* Perform constant folding and related simplification of initializer
12198 expression EXPR. These behave identically to "fold_buildN" but ignore
12199 potential run-time traps and exceptions that fold must preserve. */
12201 #define START_FOLD_INIT \
12202 int saved_signaling_nans = flag_signaling_nans;\
12203 int saved_trapping_math = flag_trapping_math;\
12204 int saved_rounding_math = flag_rounding_math;\
12205 int saved_trapv = flag_trapv;\
12206 int saved_folding_initializer = folding_initializer;\
12207 flag_signaling_nans = 0;\
12208 flag_trapping_math = 0;\
12209 flag_rounding_math = 0;\
12211 folding_initializer = 1;
12213 #define END_FOLD_INIT \
12214 flag_signaling_nans = saved_signaling_nans;\
12215 flag_trapping_math = saved_trapping_math;\
12216 flag_rounding_math = saved_rounding_math;\
12217 flag_trapv = saved_trapv;\
12218 folding_initializer = saved_folding_initializer;
12221 fold_build1_initializer (enum tree_code code, tree type, tree op)
12226 result = fold_build1 (code, type, op);
12233 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12238 result = fold_build2 (code, type, op0, op1);
12245 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12251 result = fold_build3 (code, type, op0, op1, op2);
12257 #undef START_FOLD_INIT
12258 #undef END_FOLD_INIT
12260 /* Determine if the first argument is a multiple of the second argument.
12261 Return 0 if it is not, or if we cannot easily determine it to be.
12263 An example of the sort of thing we care about (at this point; this routine
12264 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12265 fold cases do now) is discovering that
12267 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12269 is a multiple of
12271 SAVE_EXPR (J * 8)
12273 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12275 This code also handles discovering that
12277 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12279 is a multiple of 8 so we don't have to worry about dealing with a
12280 possible remainder.
12282 Note that we *look* inside a SAVE_EXPR only to determine how it was
12283 calculated; it is not safe for fold to do much of anything else with the
12284 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12285 at run time. For example, the latter example above *cannot* be implemented
12286 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12287 evaluation time of the original SAVE_EXPR is not necessarily the same at
12288 the time the new expression is evaluated. The only optimization of this
12289 sort that would be valid is changing
12291 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12293 divided by 8 to
12295 SAVE_EXPR (I) * SAVE_EXPR (J)
12297 (where the same SAVE_EXPR (J) is used in the original and the
12298 transformed version). */
12301 multiple_of_p (tree type, tree top, tree bottom)
12303 if (operand_equal_p (top, bottom, 0))
12306 if (TREE_CODE (type) != INTEGER_TYPE)
12309 switch (TREE_CODE (top))
12312 /* Bitwise and provides a power of two multiple. If the mask is
12313 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12314 if (!integer_pow2p (bottom))
12319 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12320 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12324 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12325 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12328 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12332 op1 = TREE_OPERAND (top, 1);
12333 /* const_binop may not detect overflow correctly,
12334 so check for it explicitly here. */
12335 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12336 > TREE_INT_CST_LOW (op1)
12337 && TREE_INT_CST_HIGH (op1) == 0
12338 && 0 != (t1 = fold_convert (type,
12339 const_binop (LSHIFT_EXPR,
12342 && ! TREE_OVERFLOW (t1))
12343 return multiple_of_p (type, t1, bottom);
12348 /* Can't handle conversions from non-integral or wider integral type. */
12349 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12350 || (TYPE_PRECISION (type)
12351 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12354 /* ... fall through ... */
12357 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12360 if (TREE_CODE (bottom) != INTEGER_CST
12361 || (TYPE_UNSIGNED (type)
12362 && (tree_int_cst_sgn (top) < 0
12363 || tree_int_cst_sgn (bottom) < 0)))
12365 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
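/* [Editorial sketch -- hypothetical, not GCC code.]  A runtime analogue
   of the example in the comment before multiple_of_p: I * (J * 8) is a
   multiple of 8 for any I and J, even under modular wrap-around,
   because 8 divides 2^64; this always returns 1:  */
static int
demo_multiple_of_8 (unsigned long long i, unsigned long long j)
{
  return (i * (j * 8ull)) % 8ull == 0;
}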
12373 /* Return true if `t' is known to be non-negative. If the return
12374 value is based on the assumption that signed overflow is undefined,
12375 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12376 *STRICT_OVERFLOW_P. */
12379 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12381 if (t == error_mark_node)
12384 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12387 switch (TREE_CODE (t))
12390 /* Query VRP to see if it has recorded any information about
12391 the range of this object. */
12392 return ssa_name_nonnegative_p (t);
12395 /* We can't return 1 if flag_wrapv is set because
12396 ABS_EXPR<INT_MIN> = INT_MIN. */
12397 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12399 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12401 *strict_overflow_p = true;
12407 return tree_int_cst_sgn (t) >= 0;
12410 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12413 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12414 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12416 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12417 strict_overflow_p));
12419 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12420 both unsigned and at least 2 bits shorter than the result. */
12421 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12422 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12423 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12425 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12426 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12427 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12428 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12430 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12431 TYPE_PRECISION (inner2)) + 1;
12432 return prec < TYPE_PRECISION (TREE_TYPE (t));
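/* [Editorial sketch -- hypothetical, not GCC code.]  Why one bit of
   headroom beyond MAX (prec1, prec2) suffices above: two zero-extended
   8-bit values sum to at most 255 + 255 = 510 < 2^9, so any signed
   result type with more than 9 bits of precision keeps the sign bit
   clear.  Always returns 1:  */
static int
demo_zext_plus_nonnegative (unsigned char x, unsigned char y)
{
  int sum = (int) x + (int) y;  /* 0 <= sum <= 510 */
  return sum >= 0;
}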
12438 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12440 /* x * x for floating point x is always non-negative. */
12441 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12443 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12445 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12446 strict_overflow_p));
12449 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12450 both unsigned and their combined precision is less than the result's. */
12451 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12452 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12453 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12455 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12456 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12457 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12458 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12459 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12460 < TYPE_PRECISION (TREE_TYPE (t));
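/* [Editorial sketch -- hypothetical, not GCC code.]  Likewise for
   multiplication: the product of two zero-extended 8-bit values is at
   most 255 * 255 = 65025 < 2^16, so it is non-negative whenever
   8 + 8 = 16 is less than the result's precision.  Always returns 1:  */
static int
demo_zext_mult_nonnegative (unsigned char x, unsigned char y)
{
  int prod = (int) x * (int) y; /* 0 <= prod <= 65025, and 16 < 32 */
  return prod >= 0;
}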
12466 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12468 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12469 strict_overflow_p));
12475 case TRUNC_DIV_EXPR:
12476 case CEIL_DIV_EXPR:
12477 case FLOOR_DIV_EXPR:
12478 case ROUND_DIV_EXPR:
12479 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12481 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12482 strict_overflow_p));
12484 case TRUNC_MOD_EXPR:
12485 case CEIL_MOD_EXPR:
12486 case FLOOR_MOD_EXPR:
12487 case ROUND_MOD_EXPR:
12489 case NON_LVALUE_EXPR:
12491 case FIX_TRUNC_EXPR:
12492 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12493 strict_overflow_p);
12495 case COMPOUND_EXPR:
12497 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12498 strict_overflow_p);
12501 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12502 strict_overflow_p);
12505 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12507 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12508 strict_overflow_p));
12512 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12513 tree outer_type = TREE_TYPE (t);
12515 if (TREE_CODE (outer_type) == REAL_TYPE)
12517 if (TREE_CODE (inner_type) == REAL_TYPE)
12518 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12519 strict_overflow_p);
12520 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12522 if (TYPE_UNSIGNED (inner_type))
12524 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12525 strict_overflow_p);
12528 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12530 if (TREE_CODE (inner_type) == REAL_TYPE)
12531 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
12532 strict_overflow_p);
12533 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12534 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12535 && TYPE_UNSIGNED (inner_type);
12542 tree temp = TARGET_EXPR_SLOT (t);
12543 t = TARGET_EXPR_INITIAL (t);
12545 /* If the initializer is non-void, then it's a normal expression
12546 that will be assigned to the slot. */
12547 if (!VOID_TYPE_P (t))
12548 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12550 /* Otherwise, the initializer sets the slot in some way. One common
12551 way is an assignment statement at the end of the initializer. */
12554 if (TREE_CODE (t) == BIND_EXPR)
12555 t = expr_last (BIND_EXPR_BODY (t));
12556 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12557 || TREE_CODE (t) == TRY_CATCH_EXPR)
12558 t = expr_last (TREE_OPERAND (t, 0));
12559 else if (TREE_CODE (t) == STATEMENT_LIST)
12564 if (TREE_CODE (t) == MODIFY_EXPR
12565 && TREE_OPERAND (t, 0) == temp)
12566 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12567 strict_overflow_p);
12574 tree fndecl = get_callee_fndecl (t);
12575 tree arglist = TREE_OPERAND (t, 1);
12576 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12577 switch (DECL_FUNCTION_CODE (fndecl))
12579 CASE_FLT_FN (BUILT_IN_ACOS):
12580 CASE_FLT_FN (BUILT_IN_ACOSH):
12581 CASE_FLT_FN (BUILT_IN_CABS):
12582 CASE_FLT_FN (BUILT_IN_COSH):
12583 CASE_FLT_FN (BUILT_IN_ERFC):
12584 CASE_FLT_FN (BUILT_IN_EXP):
12585 CASE_FLT_FN (BUILT_IN_EXP10):
12586 CASE_FLT_FN (BUILT_IN_EXP2):
12587 CASE_FLT_FN (BUILT_IN_FABS):
12588 CASE_FLT_FN (BUILT_IN_FDIM):
12589 CASE_FLT_FN (BUILT_IN_HYPOT):
12590 CASE_FLT_FN (BUILT_IN_POW10):
12591 CASE_INT_FN (BUILT_IN_FFS):
12592 CASE_INT_FN (BUILT_IN_PARITY):
12593 CASE_INT_FN (BUILT_IN_POPCOUNT):
12594 case BUILT_IN_BSWAP32:
12595 case BUILT_IN_BSWAP64:
12599 CASE_FLT_FN (BUILT_IN_SQRT):
12600 /* sqrt(-0.0) is -0.0. */
12601 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12603 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12604 strict_overflow_p);
12606 CASE_FLT_FN (BUILT_IN_ASINH):
12607 CASE_FLT_FN (BUILT_IN_ATAN):
12608 CASE_FLT_FN (BUILT_IN_ATANH):
12609 CASE_FLT_FN (BUILT_IN_CBRT):
12610 CASE_FLT_FN (BUILT_IN_CEIL):
12611 CASE_FLT_FN (BUILT_IN_ERF):
12612 CASE_FLT_FN (BUILT_IN_EXPM1):
12613 CASE_FLT_FN (BUILT_IN_FLOOR):
12614 CASE_FLT_FN (BUILT_IN_FMOD):
12615 CASE_FLT_FN (BUILT_IN_FREXP):
12616 CASE_FLT_FN (BUILT_IN_LCEIL):
12617 CASE_FLT_FN (BUILT_IN_LDEXP):
12618 CASE_FLT_FN (BUILT_IN_LFLOOR):
12619 CASE_FLT_FN (BUILT_IN_LLCEIL):
12620 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12621 CASE_FLT_FN (BUILT_IN_LLRINT):
12622 CASE_FLT_FN (BUILT_IN_LLROUND):
12623 CASE_FLT_FN (BUILT_IN_LRINT):
12624 CASE_FLT_FN (BUILT_IN_LROUND):
12625 CASE_FLT_FN (BUILT_IN_MODF):
12626 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12627 CASE_FLT_FN (BUILT_IN_POW):
12628 CASE_FLT_FN (BUILT_IN_RINT):
12629 CASE_FLT_FN (BUILT_IN_ROUND):
12630 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12631 CASE_FLT_FN (BUILT_IN_SINH):
12632 CASE_FLT_FN (BUILT_IN_TANH):
12633 CASE_FLT_FN (BUILT_IN_TRUNC):
12634 /* True if the 1st argument is nonnegative. */
12635 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12636 strict_overflow_p);
12638 CASE_FLT_FN (BUILT_IN_FMAX):
12639 /* True if the 1st OR 2nd arguments are nonnegative. */
12640 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12642 || (tree_expr_nonnegative_warnv_p
12643 (TREE_VALUE (TREE_CHAIN (arglist)),
12644 strict_overflow_p)));
12646 CASE_FLT_FN (BUILT_IN_FMIN):
12647 /* True if the 1st AND 2nd arguments are nonnegative. */
12648 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12650 && (tree_expr_nonnegative_warnv_p
12651 (TREE_VALUE (TREE_CHAIN (arglist)),
12652 strict_overflow_p)));
12654 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12655 /* True if the 2nd argument is nonnegative. */
12656 return (tree_expr_nonnegative_warnv_p
12657 (TREE_VALUE (TREE_CHAIN (arglist)),
12658 strict_overflow_p));
12665 /* ... fall through ... */
12669 tree type = TREE_TYPE (t);
12670 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12671 && truth_value_p (TREE_CODE (t)))
12672 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12673 have a signed:1 type (where the values are -1 and 0). */
12678 /* We don't know the sign of `t', so be conservative and return false. */
12682 /* Return true if `t' is known to be non-negative. Handle warnings
12683 about undefined signed overflow. */
12686 tree_expr_nonnegative_p (tree t)
12689 bool strict_overflow_p;
12691 strict_overflow_p = false;
12692 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12693 if (strict_overflow_p)
12694 fold_overflow_warning (("assuming signed overflow does not occur when "
12695 "determining that expression is always "
12697 WARN_STRICT_OVERFLOW_MISC);
12701 /* Return true when T is an address and is known to be nonzero.
12702 For floating point we further ensure that T is not denormal.
12703 Similar logic is present in nonzero_address in rtlanal.h.
12705 If the return value is based on the assumption that signed overflow
12706 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12707 change *STRICT_OVERFLOW_P. */
12710 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12712 tree type = TREE_TYPE (t);
12713 bool sub_strict_overflow_p;
12715 /* Doing something useful for floating point would need more work. */
12716 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12719 switch (TREE_CODE (t))
12722 /* Query VRP to see if it has recorded any information about
12723 the range of this object. */
12724 return ssa_name_nonzero_p (t);
12727 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12728 strict_overflow_p);
12731 /* We used to test for !integer_zerop here. This does not work correctly
12732 if TREE_CONSTANT_OVERFLOW (t). */
12733 return (TREE_INT_CST_LOW (t) != 0
12734 || TREE_INT_CST_HIGH (t) != 0);
12737 if (TYPE_OVERFLOW_UNDEFINED (type))
12739 /* In the presence of negative values it is hard
12740 to say anything definite. */
12741 sub_strict_overflow_p = false;
12742 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12743 &sub_strict_overflow_p)
12744 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12745 &sub_strict_overflow_p))
12747 /* One of the operands must be positive and the other non-negative. */
12748 /* We don't set *STRICT_OVERFLOW_P here: even if this value
12749 overflows, on a twos-complement machine the sum of two
12750 nonnegative numbers can never be zero. */
12751 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12753 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12754 strict_overflow_p));
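/* [Editorial sketch -- hypothetical, not GCC code; assumes a 32-bit
   unsigned.]  The two's-complement fact relied on above: the wrapped
   sum of two non-negative values, at least one of them nonzero, is
   never zero, since 0 < x + y <= 2 * (2^31 - 1) = 2^32 - 2.  Always
   returns 1:  */
static int
demo_nonneg_sum_nonzero (unsigned x, unsigned y)
{
  if (x >= 0x80000000u || y >= 0x80000000u || (x | y) == 0)
    return 1;                   /* precondition not met; vacuously true */
  return x + y != 0;
}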
12759 if (TYPE_OVERFLOW_UNDEFINED (type))
12761 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12763 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12764 strict_overflow_p))
12766 *strict_overflow_p = true;
12774 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12775 tree outer_type = TREE_TYPE (t);
12777 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12778 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12779 strict_overflow_p));
12785 tree base = get_base_address (TREE_OPERAND (t, 0));
12790 /* Weak declarations may link to NULL. */
12791 if (VAR_OR_FUNCTION_DECL_P (base))
12792 return !DECL_WEAK (base);
12794 /* Constants are never weak. */
12795 if (CONSTANT_CLASS_P (base))
12802 sub_strict_overflow_p = false;
12803 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12804 &sub_strict_overflow_p)
12805 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12806 &sub_strict_overflow_p))
12808 if (sub_strict_overflow_p)
12809 *strict_overflow_p = true;
12815 sub_strict_overflow_p = false;
12816 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12817 &sub_strict_overflow_p)
12818 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12819 &sub_strict_overflow_p))
12821 if (sub_strict_overflow_p)
12822 *strict_overflow_p = true;
12827 sub_strict_overflow_p = false;
12828 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12829 &sub_strict_overflow_p))
12831 if (sub_strict_overflow_p)
12832 *strict_overflow_p = true;
12834 /* When both operands are nonzero, then MAX must be too. */
12835 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12836 strict_overflow_p))
12839 /* MAX where operand 0 is positive is positive. */
12840 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12841 strict_overflow_p);
12843 /* MAX where operand 1 is positive is positive. */
12844 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12845 &sub_strict_overflow_p)
12846 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12847 &sub_strict_overflow_p))
12849 if (sub_strict_overflow_p)
12850 *strict_overflow_p = true;
12855 case COMPOUND_EXPR:
12858 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12859 strict_overflow_p);
12862 case NON_LVALUE_EXPR:
12863 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12864 strict_overflow_p);
12867 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12869 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12870 strict_overflow_p));
12873 return alloca_call_p (t);
12881 /* Return true when T is an address and is known to be nonzero.
12882 Handle warnings about undefined signed overflow. */
12885 tree_expr_nonzero_p (tree t)
12887 bool ret, strict_overflow_p;
12889 strict_overflow_p = false;
12890 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12891 if (strict_overflow_p)
12892 fold_overflow_warning (("assuming signed overflow does not occur when "
12893 "determining that expression is always "
12895 WARN_STRICT_OVERFLOW_MISC);
12899 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12900 attempt to fold the expression to a constant without modifying TYPE,
12901 OP0 or OP1.
12903 If the expression could be simplified to a constant, then return
12904 the constant. If the expression would not be simplified to a
12905 constant, then return NULL_TREE. */
12908 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12910 tree tem = fold_binary (code, type, op0, op1);
12911 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12914 /* Given the components of a unary expression CODE, TYPE and OP0,
12915 attempt to fold the expression to a constant without modifying
12916 TYPE or OP0.
12918 If the expression could be simplified to a constant, then return
12919 the constant. If the expression would not be simplified to a
12920 constant, then return NULL_TREE. */
12923 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12925 tree tem = fold_unary (code, type, op0);
12926 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12929 /* If EXP represents referencing an element in a constant string
12930 (either via pointer arithmetic or array indexing), return the
12931 tree representing the value accessed, otherwise return NULL. */
12934 fold_read_from_constant_string (tree exp)
12936 if ((TREE_CODE (exp) == INDIRECT_REF
12937 || TREE_CODE (exp) == ARRAY_REF)
12938 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12940 tree exp1 = TREE_OPERAND (exp, 0);
12944 if (TREE_CODE (exp) == INDIRECT_REF)
12945 string = string_constant (exp1, &index);
12948 tree low_bound = array_ref_low_bound (exp);
12949 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12951 /* Optimize the special-case of a zero lower bound.
12953 We convert the low_bound to sizetype to avoid some problems
12954 with constant folding. (E.g. suppose the lower bound is 1,
12955 and its mode is QI. Without the conversion, (ARRAY
12956 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12957 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12958 if (! integer_zerop (low_bound))
12959 index = size_diffop (index, fold_convert (sizetype, low_bound));
12965 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12966 && TREE_CODE (string) == STRING_CST
12967 && TREE_CODE (index) == INTEGER_CST
12968 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12969 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12971 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12972 return fold_convert (TREE_TYPE (exp),
12973 build_int_cst (NULL_TREE,
12974 (TREE_STRING_POINTER (string)
12975 [TREE_INT_CST_LOW (index)])));
12980 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12981 an integer constant or real constant.
12983 TYPE is the type of the result. */
12986 fold_negate_const (tree arg0, tree type)
12988 tree t = NULL_TREE;
12990 switch (TREE_CODE (arg0))
12994 unsigned HOST_WIDE_INT low;
12995 HOST_WIDE_INT high;
12996 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12997 TREE_INT_CST_HIGH (arg0),
12999 t = build_int_cst_wide (type, low, high);
13000 t = force_fit_type (t, 1,
13001 (overflow | TREE_OVERFLOW (arg0))
13002 && !TYPE_UNSIGNED (type),
13003 TREE_CONSTANT_OVERFLOW (arg0));
13008 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13012 gcc_unreachable ();
13018 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13019 an integer constant or real constant.
13021 TYPE is the type of the result. */
13024 fold_abs_const (tree arg0, tree type)
13026 tree t = NULL_TREE;
13028 switch (TREE_CODE (arg0))
13031 /* If the value is unsigned, then the absolute value is
13032 the same as the ordinary value. */
13033 if (TYPE_UNSIGNED (type))
13035 /* Similarly, if the value is non-negative. */
13036 else if (INT_CST_LT (integer_minus_one_node, arg0))
13038 /* If the value is negative, then the absolute value is
13042 unsigned HOST_WIDE_INT low;
13043 HOST_WIDE_INT high;
13044 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13045 TREE_INT_CST_HIGH (arg0),
13047 t = build_int_cst_wide (type, low, high);
13048 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13049 TREE_CONSTANT_OVERFLOW (arg0));
13054 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13055 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13061 gcc_unreachable ();
13067 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13068 constant. TYPE is the type of the result. */
13071 fold_not_const (tree arg0, tree type)
13073 tree t = NULL_TREE;
13075 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13077 t = build_int_cst_wide (type,
13078 ~ TREE_INT_CST_LOW (arg0),
13079 ~ TREE_INT_CST_HIGH (arg0));
13080 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13081 TREE_CONSTANT_OVERFLOW (arg0));
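/* [Editorial sketch -- hypothetical, not GCC code.]  The double-word
   complement used in fold_not_const: bitwise NOT acts on each bit
   independently, so complementing the low and high words separately
   complements the whole two-word value:  */
static void
demo_not_double (unsigned long long lo, unsigned long long hi,
                 unsigned long long *not_lo, unsigned long long *not_hi)
{
  *not_lo = ~lo;
  *not_hi = ~hi;
}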
13086 /* Given CODE, a relational operator, the target type, TYPE and two
13087 constant operands OP0 and OP1, return the result of the
13088 relational operation. If the result is not a compile time
13089 constant, then return NULL_TREE. */
13092 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13094 int result, invert;
13096 /* From here on, the only cases we handle are when the result is
13097 known to be a constant. */
13099 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13101 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13102 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13104 /* Handle the cases where either operand is a NaN. */
13105 if (real_isnan (c0) || real_isnan (c1))
13115 case UNORDERED_EXPR:
13129 if (flag_trapping_math)
13135 gcc_unreachable ();
13138 return constant_boolean_node (result, type);
13141 return constant_boolean_node (real_compare (code, c0, c1), type);
13144 /* Handle equality/inequality of complex constants. */
13145 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13147 tree rcond = fold_relational_const (code, type,
13148 TREE_REALPART (op0),
13149 TREE_REALPART (op1));
13150 tree icond = fold_relational_const (code, type,
13151 TREE_IMAGPART (op0),
13152 TREE_IMAGPART (op1));
13153 if (code == EQ_EXPR)
13154 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13155 else if (code == NE_EXPR)
13156 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13161 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13163 To compute GT, swap the arguments and do LT.
13164 To compute GE, do LT and invert the result.
13165 To compute LE, swap the arguments, do LT and invert the result.
13166 To compute NE, do EQ and invert the result.
13168 Therefore, the code below must handle only EQ and LT. */
13170 if (code == LE_EXPR || code == GT_EXPR)
13175 code = swap_tree_comparison (code);
13178 /* Note that it is safe to invert for real values here because we
13179 have already handled the one case where it matters. */
13182 if (code == NE_EXPR || code == GE_EXPR)
13185 code = invert_tree_comparison (code, false);
13188 /* Compute a result for LT or EQ if args permit;
13189 otherwise return T. */
13190 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13192 if (code == EQ_EXPR)
13193 result = tree_int_cst_equal (op0, op1);
13194 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13195 result = INT_CST_LT_UNSIGNED (op0, op1);
13197 result = INT_CST_LT (op0, op1);
13204 return constant_boolean_node (result, type);
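/* [Editorial sketch -- hypothetical, not GCC code.]  The reduction
   described above: every integer comparison can be computed from just
   '<' and '==' by swapping operands and/or inverting the result:  */
static int
demo_compare_via_lt_eq (long a, long b, char code)
{
  switch (code)
    {
    case '<': return a < b;             /* LT */
    case '=': return a == b;            /* EQ */
    case '>': return b < a;             /* GT: swap, then LT */
    case 'g': return !(a < b);          /* GE: LT, invert */
    case 'l': return !(b < a);          /* LE: swap, LT, invert */
    default:  return !(a == b);         /* NE: EQ, invert */
    }
}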
13207 /* Build an expression for a cleanup point containing EXPR with type TYPE.
13208 Don't build a cleanup point expression for EXPR if it doesn't have side
13209 effects. */
13212 fold_build_cleanup_point_expr (tree type, tree expr)
13214 /* If the expression does not have side effects then we don't have to wrap
13215 it with a cleanup point expression. */
13216 if (!TREE_SIDE_EFFECTS (expr))
13219 /* If the expression is a return, check whether the expression inside the
13220 return, or the right-hand side of the modify expression inside the
13221 return, has side effects. If either has none, we don't need to wrap
13222 the expression in a cleanup point expression. Note we don't check the
13223 left-hand side of the modify because it should always be a return decl. */
13224 if (TREE_CODE (expr) == RETURN_EXPR)
13226 tree op = TREE_OPERAND (expr, 0);
13227 if (!op || !TREE_SIDE_EFFECTS (op))
13229 op = TREE_OPERAND (op, 1);
13230 if (!TREE_SIDE_EFFECTS (op))
13234 return build1 (CLEANUP_POINT_EXPR, type, expr);
13237 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13238 avoid confusing the gimplify process. */
13241 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13243 /* The size of the object is not relevant when talking about its address. */
13244 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13245 t = TREE_OPERAND (t, 0);
13247 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13248 if (TREE_CODE (t) == INDIRECT_REF
13249 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13251 t = TREE_OPERAND (t, 0);
13252 if (TREE_TYPE (t) != ptrtype)
13253 t = build1 (NOP_EXPR, ptrtype, t);
13259 while (handled_component_p (base))
13260 base = TREE_OPERAND (base, 0);
13262 TREE_ADDRESSABLE (base) = 1;
13264 t = build1 (ADDR_EXPR, ptrtype, t);
13271 build_fold_addr_expr (tree t)
13273 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13276 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13277 of an indirection through OP0, or NULL_TREE if no simplification is
13278 possible. */
13281 fold_indirect_ref_1 (tree type, tree op0)
13287 subtype = TREE_TYPE (sub);
13288 if (!POINTER_TYPE_P (subtype))
13291 if (TREE_CODE (sub) == ADDR_EXPR)
13293 tree op = TREE_OPERAND (sub, 0);
13294 tree optype = TREE_TYPE (op);
13295 /* *&CONST_DECL -> to the value of the const decl. */
13296 if (TREE_CODE (op) == CONST_DECL)
13297 return DECL_INITIAL (op);
13298 /* *&p => p; make sure to handle *&"str"[cst] here. */
13299 if (type == optype)
13301 tree fop = fold_read_from_constant_string (op);
13307 /* *(foo *)&fooarray => fooarray[0] */
13308 else if (TREE_CODE (optype) == ARRAY_TYPE
13309 && type == TREE_TYPE (optype))
13311 tree type_domain = TYPE_DOMAIN (optype);
13312 tree min_val = size_zero_node;
13313 if (type_domain && TYPE_MIN_VALUE (type_domain))
13314 min_val = TYPE_MIN_VALUE (type_domain);
13315 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13317 /* *(foo *)&complexfoo => __real__ complexfoo */
13318 else if (TREE_CODE (optype) == COMPLEX_TYPE
13319 && type == TREE_TYPE (optype))
13320 return fold_build1 (REALPART_EXPR, type, op);
13323 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13324 if (TREE_CODE (sub) == PLUS_EXPR
13325 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13327 tree op00 = TREE_OPERAND (sub, 0);
13328 tree op01 = TREE_OPERAND (sub, 1);
13332 op00type = TREE_TYPE (op00);
13333 if (TREE_CODE (op00) == ADDR_EXPR
13334 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13335 && type == TREE_TYPE (TREE_TYPE (op00type)))
13337 tree size = TYPE_SIZE_UNIT (type);
13338 if (tree_int_cst_equal (size, op01))
13339 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13343 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13344 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13345 && type == TREE_TYPE (TREE_TYPE (subtype)))
13348 tree min_val = size_zero_node;
13349 sub = build_fold_indirect_ref (sub);
13350 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13351 if (type_domain && TYPE_MIN_VALUE (type_domain))
13352 min_val = TYPE_MIN_VALUE (type_domain);
13353 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
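/* [Editorial sketch -- hypothetical, not GCC code.]  Source-level view
   of the "*(foo *)&fooarray => fooarray[0]" case folded above; always
   returns 1:  */
static int
demo_indirect_ref_fold (void)
{
  double arr[4] = { 1.0, 2.0, 3.0, 4.0 };
  return *(double *) &arr == arr[0];
}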
13359 /* Builds an expression for an indirection through T, simplifying some
13360 cases. */
13363 build_fold_indirect_ref (tree t)
13365 tree type = TREE_TYPE (TREE_TYPE (t));
13366 tree sub = fold_indirect_ref_1 (type, t);
13371 return build1 (INDIRECT_REF, type, t);
13374 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13377 fold_indirect_ref (tree t)
13379 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13387 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13388 whose result is ignored. The type of the returned tree need not be
13389 the same as the original expression. */
13392 fold_ignored_result (tree t)
13394 if (!TREE_SIDE_EFFECTS (t))
13395 return integer_zero_node;
13398 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13401 t = TREE_OPERAND (t, 0);
13405 case tcc_comparison:
13406 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13407 t = TREE_OPERAND (t, 0);
13408 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13409 t = TREE_OPERAND (t, 1);
13414 case tcc_expression:
13415 switch (TREE_CODE (t))
13417 case COMPOUND_EXPR:
13418 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13420 t = TREE_OPERAND (t, 0);
13424 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13425 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13427 t = TREE_OPERAND (t, 0);
13440 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13441 This can only be applied to objects of a sizetype. */
13444 round_up (tree value, int divisor)
13446 tree div = NULL_TREE;
13448 gcc_assert (divisor > 0);
13452 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13453 have to do anything. Only do this when we are not given a const,
13454 because in that case, this check is more expensive than just
13455 doing it. */
13456 if (TREE_CODE (value) != INTEGER_CST)
13458 div = build_int_cst (TREE_TYPE (value), divisor);
13460 if (multiple_of_p (TREE_TYPE (value), value, div))
13464 /* If divisor is a power of two, simplify this to bit manipulation. */
13465 if (divisor == (divisor & -divisor))
13469 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13470 value = size_binop (PLUS_EXPR, value, t);
13471 t = build_int_cst (TREE_TYPE (value), -divisor);
13472 value = size_binop (BIT_AND_EXPR, value, t);
13477 div = build_int_cst (TREE_TYPE (value), divisor);
13478 value = size_binop (CEIL_DIV_EXPR, value, div);
13479 value = size_binop (MULT_EXPR, value, div);
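/* [Editorial sketch -- hypothetical, not GCC code.]  The power-of-two
   branch above in plain C: adding DIVISOR - 1 and masking with -DIVISOR
   rounds VALUE up to a multiple of DIVISOR when DIVISOR is a power of
   two (ignoring wrap-around at the very top of the range):  */
static unsigned long
demo_round_up_pow2 (unsigned long value, unsigned long divisor)
{
  return (value + divisor - 1) & -divisor;
  /* e.g. demo_round_up_pow2 (13, 8) == 16; (16, 8) == 16.  */
}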
13485 /* Likewise, but round down. */
13488 round_down (tree value, int divisor)
13490 tree div = NULL_TREE;
13492 gcc_assert (divisor > 0);
13496 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13497 have to do anything. Only do this when we are not given a const,
13498 because in that case, this check is more expensive than just
13499 doing it. */
13500 if (TREE_CODE (value) != INTEGER_CST)
13502 div = build_int_cst (TREE_TYPE (value), divisor);
13504 if (multiple_of_p (TREE_TYPE (value), value, div))
13508 /* If divisor is a power of two, simplify this to bit manipulation. */
13509 if (divisor == (divisor & -divisor))
13513 t = build_int_cst (TREE_TYPE (value), -divisor);
13514 value = size_binop (BIT_AND_EXPR, value, t);
13519 div = build_int_cst (TREE_TYPE (value), divisor);
13520 value = size_binop (FLOOR_DIV_EXPR, value, div);
13521 value = size_binop (MULT_EXPR, value, div);
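/* [Editorial sketch -- hypothetical, not GCC code.]  Likewise for
   rounding down: masking with -DIVISOR alone clears the low bits:  */
static unsigned long
demo_round_down_pow2 (unsigned long value, unsigned long divisor)
{
  return value & -divisor;
  /* e.g. demo_round_down_pow2 (13, 8) == 8; (16, 8) == 16.  */
}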
13527 /* Returns the pointer to the base of the object addressed by EXP and
13528 extracts the information about the offset of the access, storing it
13529 to PBITPOS and POFFSET. */
13532 split_address_to_core_and_offset (tree exp,
13533 HOST_WIDE_INT *pbitpos, tree *poffset)
13536 enum machine_mode mode;
13537 int unsignedp, volatilep;
13538 HOST_WIDE_INT bitsize;
13540 if (TREE_CODE (exp) == ADDR_EXPR)
13542 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13543 poffset, &mode, &unsignedp, &volatilep,
13545 core = build_fold_addr_expr (core);
13551 *poffset = NULL_TREE;
13557 /* Returns true if addresses of E1 and E2 differ by a constant, false
13558 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13561 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13564 HOST_WIDE_INT bitpos1, bitpos2;
13565 tree toffset1, toffset2, tdiff, type;
13567 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13568 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13570 if (bitpos1 % BITS_PER_UNIT != 0
13571 || bitpos2 % BITS_PER_UNIT != 0
13572 || !operand_equal_p (core1, core2, 0))
13575 if (toffset1 && toffset2)
13577 type = TREE_TYPE (toffset1);
13578 if (type != TREE_TYPE (toffset2))
13579 toffset2 = fold_convert (type, toffset2);
13581 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13582 if (!cst_and_fits_in_hwi (tdiff))
13585 *diff = int_cst_value (tdiff);
13587 else if (toffset1 || toffset2)
13589 /* If only one of the offsets is non-constant, the difference cannot
13590 be expressed in an integer constant. */
13596 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
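/* [Editorial sketch -- hypothetical, not GCC code.]  What
   ptr_difference_const establishes, at the source level: two addresses
   sharing the same core object and constant offsets differ by a
   compile-time constant; always returns 1:  */
static int
demo_ptr_difference (void)
{
  char buf[32];
  return &buf[20] - &buf[4] == 16;
}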
13600 /* Simplify the floating point expression EXP when the sign of the
13601 result is not significant. Return NULL_TREE if no simplification
13602 was possible. */
13605 fold_strip_sign_ops (tree exp)
13609 switch (TREE_CODE (exp))
13613 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13614 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13618 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13620 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13621 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13622 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13623 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13624 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13625 arg1 ? arg1 : TREE_OPERAND (exp, 1));