1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop and force_fit_type.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type takes a constant, an overflowable flag and prior
44 overflow indicators. It forces the value to fit the type and sets
45 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
49 #include "coretypes.h"
61 #include "langhooks.h"
64 /* Nonzero if we are folding constants inside an initializer; zero otherwise. */
66 int folding_initializer = 0;
68 /* The following constants represent a bit-based encoding of GCC's
69 comparison operators. This encoding simplifies transformations
70 on relational comparison operators, such as AND and OR. */
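/* Illustrative sketch (not from the original source): assuming the
   conventional encoding where bit 0 means "less", bit 1 means "equal"
   and bit 2 means "greater", comparison codes compose by plain
   bitwise operations, e.g.

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
     (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ

   so "a <= b AND a >= b" folds to "a == b" with a single bitwise AND.  */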
71 enum comparison_code {
90 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
91 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
92 static bool negate_mathfn_p (enum built_in_function);
93 static bool negate_expr_p (tree);
94 static tree negate_expr (tree);
95 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
96 static tree associate_trees (tree, tree, enum tree_code, tree);
97 static tree const_binop (enum tree_code, tree, tree, int);
98 static enum comparison_code comparison_to_compcode (enum tree_code);
99 static enum tree_code compcode_to_comparison (enum comparison_code);
100 static tree combine_comparisons (enum tree_code, enum tree_code,
101 enum tree_code, tree, tree, tree);
102 static int truth_value_p (enum tree_code);
103 static int operand_equal_for_comparison_p (tree, tree, tree);
104 static int twoval_comparison_p (tree, tree *, tree *, int *);
105 static tree eval_subst (tree, tree, tree, tree, tree);
106 static tree pedantic_omit_one_operand (tree, tree, tree);
107 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
108 static tree make_bit_field_ref (tree, tree, int, int, int);
109 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
110 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
111 enum machine_mode *, int *, int *,
113 static int all_ones_mask_p (tree, int);
114 static tree sign_bit_p (tree, tree);
115 static int simple_operand_p (tree);
116 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117 static tree range_predecessor (tree);
118 static tree range_successor (tree);
119 static tree make_range (tree, int *, tree *, tree *, bool *);
120 static tree build_range_check (tree, tree, int, tree, tree);
121 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
123 static tree fold_range_test (enum tree_code, tree, tree, tree);
124 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
125 static tree unextend (tree, int, int, tree);
126 static tree fold_truthop (enum tree_code, tree, tree, tree);
127 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
128 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
129 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
130 static int multiple_of_p (tree, tree, tree);
131 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
134 static bool fold_real_zero_addition_p (tree, tree, int);
135 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
137 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
138 static tree fold_div_compare (enum tree_code, tree, tree, tree);
139 static bool reorder_operands_p (tree, tree);
140 static tree fold_negate_const (tree, tree);
141 static tree fold_not_const (tree, tree);
142 static tree fold_relational_const (enum tree_code, tree, tree, tree);
143 static int native_encode_expr (tree, unsigned char *, int);
144 static tree native_interpret_expr (tree, unsigned char *, int);
147 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
148 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
149 and SUM1. Then this yields nonzero if overflow occurred during the addition.
152 Overflow occurs if A and B have the same sign, but A and SUM differ in
153 sign. Use `^' to test whether signs differ, and `< 0' to isolate the sign bit. */
155 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
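/* Worked example (illustrative, using 8-bit values for brevity):
   a = 0x70 and b = 0x50 are both positive, but sum = 0xC0 is negative.
   ~(a ^ b) = 0xDF has the sign bit set (the signs of A and B agree),
   and (a ^ sum) = 0xB0 has it set too (A and SUM disagree), so the AND
   is negative and OVERFLOW_SUM_SIGN reports the overflow.  */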
157 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
158 We do that by representing the two-word integer in 4 words, with only
159 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
160 number. The value of the word is LOWPART + HIGHPART * BASE. */
163 #define LOWPART(x) ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
164 #define HIGHPART(x) \
165 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
166 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
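/* Numeric example (illustrative): on a host where
   HOST_BITS_PER_WIDE_INT is 32, BASE is 0x10000 and

     LOWPART (0x12345678) == 0x5678
     HIGHPART (0x12345678) == 0x1234

   so every word x satisfies x == LOWPART (x) + HIGHPART (x) * BASE.  */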
168 /* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
181 /* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
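/* Usage sketch (illustrative): encode and decode are inverses, so a
   doubleword constant survives a round trip through the 4-word form:

     HOST_WIDE_INT w[4], hi;
     unsigned HOST_WIDE_INT lo;
     encode (w, 0xdeadbeef, 0x12);
     decode (w, &lo, &hi);

   after which lo == 0xdeadbeef and hi == 0x12 again.  The multiword
   arithmetic below operates digit-wise on such arrays.  */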
193 /* T is an INTEGER_CST node. OVERFLOWABLE indicates if we are interested
194 in overflow of the value; when >0 we are only interested in signed
195 overflow, for <0 we are interested in any overflow. OVERFLOWED
196 indicates whether overflow has already occurred. CONST_OVERFLOWED
197 indicates whether constant overflow has already occurred. We force
198 T's value to be within range of T's type (by setting to 0 or 1 all
199 the bits outside the type's range). We set TREE_OVERFLOW if
200 OVERFLOWED is nonzero,
201 or OVERFLOWABLE is >0 and signed overflow occurs,
202 or OVERFLOWABLE is <0 and any overflow occurs.
203 We set TREE_CONSTANT_OVERFLOW if
204 CONST_OVERFLOWED is nonzero,
205 or we set TREE_OVERFLOW.
206 We return either the original T, or a copy. */
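/* Example (illustrative): forcing the value 0x1ff into an 8-bit
   unsigned type clears the bits above the precision and yields a new
   INTEGER_CST holding 0xff; whether TREE_OVERFLOW is set on that copy
   then follows the OVERFLOWABLE rules spelled out above (e.g. any
   such overflow is flagged when OVERFLOWABLE < 0).  */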
209 force_fit_type (tree t, int overflowable,
210 bool overflowed, bool overflowed_const)
212 unsigned HOST_WIDE_INT low;
215 int sign_extended_type;
217 gcc_assert (TREE_CODE (t) == INTEGER_CST);
219 low = TREE_INT_CST_LOW (t);
220 high = TREE_INT_CST_HIGH (t);
222 if (POINTER_TYPE_P (TREE_TYPE (t))
223 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
226 prec = TYPE_PRECISION (TREE_TYPE (t));
227 /* Size types *are* sign extended. */
228 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
229 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
230 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
232 /* First clear all bits that are beyond the type's precision. */
234 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
236 else if (prec > HOST_BITS_PER_WIDE_INT)
237 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
241 if (prec < HOST_BITS_PER_WIDE_INT)
242 low &= ~((HOST_WIDE_INT) (-1) << prec);
245 if (!sign_extended_type)
246 /* No sign extension */;
247 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
248 /* Correct width already. */;
249 else if (prec > HOST_BITS_PER_WIDE_INT)
251 /* Sign extend top half? */
252 if (high & ((unsigned HOST_WIDE_INT)1
253 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
254 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
256 else if (prec == HOST_BITS_PER_WIDE_INT)
258 if ((HOST_WIDE_INT)low < 0)
263 /* Sign extend bottom half? */
264 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
267 low |= (HOST_WIDE_INT)(-1) << prec;
271 /* If the value changed, return a new node. */
272 if (overflowed || overflowed_const
273 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
275 t = build_int_cst_wide (TREE_TYPE (t), low, high);
279 || (overflowable > 0 && sign_extended_type))
282 TREE_OVERFLOW (t) = 1;
283 TREE_CONSTANT_OVERFLOW (t) = 1;
285 else if (overflowed_const)
288 TREE_CONSTANT_OVERFLOW (t) = 1;
295 /* Add two doubleword integers with doubleword result.
296 Return nonzero if the operation overflows according to UNSIGNED_P.
297 Each argument is given as two `HOST_WIDE_INT' pieces.
298 One argument is L1 and H1; the other, L2 and H2.
299 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
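/* Usage sketch (illustrative): a caller adds two doubleword constants
   and tests for signed overflow like so:

     unsigned HOST_WIDE_INT lv;
     HOST_WIDE_INT hv;
     if (add_double_with_sign (l1, h1, l2, h2, &lv, &hv, false))
       ;  (the signed addition overflowed)

   Passing true for UNSIGNED_P applies the unsigned overflow rule
   instead.  */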
302 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
303 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
304 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
307 unsigned HOST_WIDE_INT l;
311 h = h1 + h2 + (l < l1);
317 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
319 return OVERFLOW_SUM_SIGN (h1, h2, h);
322 /* Negate a doubleword integer with doubleword result.
323 Return nonzero if the operation overflows, assuming it's signed.
324 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
325 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
328 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
329 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
335 return (*hv & h1) < 0;
345 /* Multiply two doubleword integers with doubleword result.
346 Return nonzero if the operation overflows according to UNSIGNED_P.
347 Each argument is given as two `HOST_WIDE_INT' pieces.
348 One argument is L1 and H1; the other, L2 and H2.
349 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
352 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
353 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
354 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
357 HOST_WIDE_INT arg1[4];
358 HOST_WIDE_INT arg2[4];
359 HOST_WIDE_INT prod[4 * 2];
360 unsigned HOST_WIDE_INT carry;
362 unsigned HOST_WIDE_INT toplow, neglow;
363 HOST_WIDE_INT tophigh, neghigh;
365 encode (arg1, l1, h1);
366 encode (arg2, l2, h2);
368 memset (prod, 0, sizeof prod);
370 for (i = 0; i < 4; i++)
373 for (j = 0; j < 4; j++)
376 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
377 carry += arg1[i] * arg2[j];
378 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
380 prod[k] = LOWPART (carry);
381 carry = HIGHPART (carry);
386 decode (prod, lv, hv);
387 decode (prod + 4, &toplow, &tophigh);
389 /* Unsigned overflow is immediate. */
391 return (toplow | tophigh) != 0;
393 /* Check for signed overflow by calculating the signed representation of the
394 top half of the result; it should agree with the low half's sign bit. */
397 neg_double (l2, h2, &neglow, &neghigh);
398 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
402 neg_double (l1, h1, &neglow, &neghigh);
403 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
405 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
408 /* Shift the doubleword integer in L1, H1 left by COUNT places
409 keeping only PREC bits of result.
410 Shift right if COUNT is negative.
411 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
412 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
415 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
416 HOST_WIDE_INT count, unsigned int prec,
417 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
419 unsigned HOST_WIDE_INT signmask;
423 rshift_double (l1, h1, -count, prec, lv, hv, arith);
427 if (SHIFT_COUNT_TRUNCATED)
430 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
432 /* Shifting by the host word size is undefined according to the
433 ANSI standard, so we must handle this as a special case. */
437 else if (count >= HOST_BITS_PER_WIDE_INT)
439 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
444 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
445 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
449 /* Sign extend all bits that are beyond the precision. */
451 signmask = -((prec > HOST_BITS_PER_WIDE_INT
452 ? ((unsigned HOST_WIDE_INT) *hv
453 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
454 : (*lv >> (prec - 1))) & 1);
456 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
458 else if (prec >= HOST_BITS_PER_WIDE_INT)
460 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
461 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
466 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
467 *lv |= signmask << prec;
471 /* Shift the doubleword integer in L1, H1 right by COUNT places
472 keeping only PREC bits of result. COUNT must be positive.
473 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
474 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
477 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
478 HOST_WIDE_INT count, unsigned int prec,
479 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
482 unsigned HOST_WIDE_INT signmask;
485 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
488 if (SHIFT_COUNT_TRUNCATED)
491 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
493 /* Shifting by the host word size is undefined according to the
494 ANSI standard, so we must handle this as a special case. */
498 else if (count >= HOST_BITS_PER_WIDE_INT)
501 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
505 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
507 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
510 /* Zero / sign extend all bits that are beyond the precision. */
512 if (count >= (HOST_WIDE_INT)prec)
517 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
519 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
521 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
522 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
527 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
528 *lv |= signmask << (prec - count);
532 /* Rotate the doubleword integer in L1, H1 left by COUNT places
533 keeping only PREC bits of result.
534 Rotate right if COUNT is negative.
535 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
538 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539 HOST_WIDE_INT count, unsigned int prec,
540 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
542 unsigned HOST_WIDE_INT s1l, s2l;
543 HOST_WIDE_INT s1h, s2h;
549 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
555 /* Rotate the doubleword integer in L1, H1 right by COUNT places
556 keeping only PREC bits of result. COUNT must be positive.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
560 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
571 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
577 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
578 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
579 CODE is a tree code for a kind of division, one of
580 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR or EXACT_DIV_EXPR.
582 It controls how the quotient is rounded to an integer.
583 Return nonzero if the operation overflows.
584 UNS nonzero says do unsigned division. */
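/* Worked example (illustrative): dividing -7 by 2 under the different
   codes gives

     TRUNC_DIV_EXPR  quo = -3, rem = -1  (round toward zero)
     FLOOR_DIV_EXPR  quo = -4, rem =  1  (round toward negative infinity)
     CEIL_DIV_EXPR   quo = -3, rem = -1  (round toward positive infinity)
     ROUND_DIV_EXPR  quo = -4, rem =  1  (round to nearest, ties away
                                          from zero)

   and in every case num == quo * den + rem.  */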
587 div_and_round_double (enum tree_code code, int uns,
588 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
589 HOST_WIDE_INT hnum_orig,
590 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
591 HOST_WIDE_INT hden_orig,
592 unsigned HOST_WIDE_INT *lquo,
593 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
597 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
598 HOST_WIDE_INT den[4], quo[4];
600 unsigned HOST_WIDE_INT work;
601 unsigned HOST_WIDE_INT carry = 0;
602 unsigned HOST_WIDE_INT lnum = lnum_orig;
603 HOST_WIDE_INT hnum = hnum_orig;
604 unsigned HOST_WIDE_INT lden = lden_orig;
605 HOST_WIDE_INT hden = hden_orig;
608 if (hden == 0 && lden == 0)
609 overflow = 1, lden = 1;
611 /* Calculate quotient sign and convert operands to unsigned. */
617 /* (minimum integer) / (-1) is the only overflow case. */
618 if (neg_double (lnum, hnum, &lnum, &hnum)
619 && ((HOST_WIDE_INT) lden & hden) == -1)
625 neg_double (lden, hden, &lden, &hden);
629 if (hnum == 0 && hden == 0)
630 { /* single precision */
632 /* This unsigned division rounds toward zero. */
638 { /* trivial case: dividend < divisor */
639 /* hden != 0 already checked. */
646 memset (quo, 0, sizeof quo);
648 memset (num, 0, sizeof num); /* to zero the extra scaling element */
649 memset (den, 0, sizeof den);
651 encode (num, lnum, hnum);
652 encode (den, lden, hden);
654 /* Special code for when the divisor < BASE. */
655 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
657 /* hnum != 0 already checked. */
658 for (i = 4 - 1; i >= 0; i--)
660 work = num[i] + carry * BASE;
661 quo[i] = work / lden;
667 /* Full double precision division,
668 with thanks to Don Knuth's "Seminumerical Algorithms". */
669 int num_hi_sig, den_hi_sig;
670 unsigned HOST_WIDE_INT quo_est, scale;
672 /* Find the highest nonzero divisor digit. */
673 for (i = 4 - 1;; i--)
680 /* Ensure that the first digit of the divisor is at least BASE/2.
681 This is required by the quotient digit estimation algorithm. */
683 scale = BASE / (den[den_hi_sig] + 1);
685 { /* scale divisor and dividend */
687 for (i = 0; i <= 4 - 1; i++)
689 work = (num[i] * scale) + carry;
690 num[i] = LOWPART (work);
691 carry = HIGHPART (work);
696 for (i = 0; i <= 4 - 1; i++)
698 work = (den[i] * scale) + carry;
699 den[i] = LOWPART (work);
700 carry = HIGHPART (work);
701 if (den[i] != 0) den_hi_sig = i;
708 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
710 /* Guess the next quotient digit, quo_est, by dividing the first
711 two remaining dividend digits by the high order divisor digit.
712 quo_est is never low and is at most 2 high. */
713 unsigned HOST_WIDE_INT tmp;
715 num_hi_sig = i + den_hi_sig + 1;
716 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
717 if (num[num_hi_sig] != den[den_hi_sig])
718 quo_est = work / den[den_hi_sig];
722 /* Refine quo_est so it's usually correct, and at most one high. */
723 tmp = work - quo_est * den[den_hi_sig];
725 && (den[den_hi_sig - 1] * quo_est
726 > (tmp * BASE + num[num_hi_sig - 2])))
729 /* Try QUO_EST as the quotient digit, by multiplying the
730 divisor by QUO_EST and subtracting from the remaining dividend.
731 Keep in mind that QUO_EST is the I - 1st digit. */
734 for (j = 0; j <= den_hi_sig; j++)
736 work = quo_est * den[j] + carry;
737 carry = HIGHPART (work);
738 work = num[i + j] - LOWPART (work);
739 num[i + j] = LOWPART (work);
740 carry += HIGHPART (work) != 0;
743 /* If quo_est was high by one, then num[i] went negative and
744 we need to correct things. */
745 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
748 carry = 0; /* add divisor back in */
749 for (j = 0; j <= den_hi_sig; j++)
751 work = num[i + j] + den[j] + carry;
752 carry = HIGHPART (work);
753 num[i + j] = LOWPART (work);
756 num[num_hi_sig] += carry;
759 /* Store the quotient digit. */
764 decode (quo, lquo, hquo);
767 /* If the result should be negative, negate it. */
769 neg_double (*lquo, *hquo, lquo, hquo);
771 /* Compute trial remainder: rem = num - (quo * den) */
772 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
773 neg_double (*lrem, *hrem, lrem, hrem);
774 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
779 case TRUNC_MOD_EXPR: /* round toward zero */
780 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
784 case FLOOR_MOD_EXPR: /* round toward negative infinity */
785 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
788 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
796 case CEIL_MOD_EXPR: /* round toward positive infinity */
797 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
799 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
807 case ROUND_MOD_EXPR: /* round to closest integer */
809 unsigned HOST_WIDE_INT labs_rem = *lrem;
810 HOST_WIDE_INT habs_rem = *hrem;
811 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
812 HOST_WIDE_INT habs_den = hden, htwice;
814 /* Get absolute values. */
816 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
818 neg_double (lden, hden, &labs_den, &habs_den);
820 /* If (2 * abs (lrem) >= abs (lden)) */
821 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
822 labs_rem, habs_rem, <wice, &htwice);
824 if (((unsigned HOST_WIDE_INT) habs_den
825 < (unsigned HOST_WIDE_INT) htwice)
826 || (((unsigned HOST_WIDE_INT) habs_den
827 == (unsigned HOST_WIDE_INT) htwice)
828 && (labs_den < ltwice)))
832 add_double (*lquo, *hquo,
833 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
836 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
848 /* Compute true remainder: rem = num - (quo * den) */
849 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
850 neg_double (*lrem, *hrem, lrem, hrem);
851 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
855 /* If ARG2 divides ARG1 with zero remainder, carry out the division
856 of type CODE and return the quotient.
857 Otherwise return NULL_TREE. */
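/* Example (illustrative): with INTEGER_CST arguments holding 12 and 4,
   div_if_zero_remainder (EXACT_DIV_EXPR, arg1, arg2) returns a
   constant holding 3; with 13 and 4 it returns NULL_TREE, since the
   remainder is nonzero.  */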
860 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
862 unsigned HOST_WIDE_INT int1l, int2l;
863 HOST_WIDE_INT int1h, int2h;
864 unsigned HOST_WIDE_INT quol, reml;
865 HOST_WIDE_INT quoh, remh;
866 tree type = TREE_TYPE (arg1);
867 int uns = TYPE_UNSIGNED (type);
869 int1l = TREE_INT_CST_LOW (arg1);
870 int1h = TREE_INT_CST_HIGH (arg1);
871 int2l = TREE_INT_CST_LOW (arg2);
872 int2h = TREE_INT_CST_HIGH (arg2);
874 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
875 &quol, &quoh, &reml, &remh);
876 if (remh != 0 || reml != 0)
879 return build_int_cst_wide (type, quol, quoh);
882 /* This is nonzero if we should defer warnings about undefined
883 overflow. This facility exists because these warnings are a
884 special case. The code to estimate loop iterations does not want
885 to issue any warnings, since it works with expressions which do not
886 occur in user code. Various bits of cleanup code call fold(), but
887 only use the result if it has certain characteristics (e.g., is a
888 constant); that code only wants to issue a warning if the result is used. */
891 static int fold_deferring_overflow_warnings;
893 /* If a warning about undefined overflow is deferred, this is the
894 warning. Note that this may cause us to turn two warnings into
895 one, but that is fine since it is sufficient to only give one
896 warning per expression. */
898 static const char* fold_deferred_overflow_warning;
900 /* If a warning about undefined overflow is deferred, this is the
901 level at which the warning should be emitted. */
903 static enum warn_strict_overflow_code fold_deferred_overflow_code;
905 /* Start deferring overflow warnings. We could use a stack here to
906 permit nested calls, but at present it is not necessary. */
909 fold_defer_overflow_warnings (void)
911 ++fold_deferring_overflow_warnings;
914 /* Stop deferring overflow warnings. If there is a pending warning,
915 and ISSUE is true, then issue the warning if appropriate. STMT is
916 the statement with which the warning should be associated (used for
917 location information); STMT may be NULL. CODE is the level of the
918 warning--a warn_strict_overflow_code value. This function will use
919 the smaller of CODE and the deferred code when deciding whether to
920 issue the warning. CODE may be zero to mean always use the deferred code. */
924 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
929 gcc_assert (fold_deferring_overflow_warnings > 0);
930 --fold_deferring_overflow_warnings;
931 if (fold_deferring_overflow_warnings > 0)
933 if (fold_deferred_overflow_warning != NULL
935 && code < (int) fold_deferred_overflow_code)
936 fold_deferred_overflow_code = code;
940 warnmsg = fold_deferred_overflow_warning;
941 fold_deferred_overflow_warning = NULL;
943 if (!issue || warnmsg == NULL)
946 /* Use the smallest code level when deciding to issue the warning. */
948 if (code == 0 || code > (int) fold_deferred_overflow_code)
949 code = fold_deferred_overflow_code;
951 if (!issue_strict_overflow_warning (code))
954 if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
955 locus = input_location;
957 locus = EXPR_LOCATION (stmt);
958 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
961 /* Stop deferring overflow warnings, ignoring any deferred warnings. */
965 fold_undefer_and_ignore_overflow_warnings (void)
967 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
970 /* Whether we are deferring overflow warnings. */
973 fold_deferring_overflow_warnings_p (void)
975 return fold_deferring_overflow_warnings > 0;
978 /* This is called when we fold something based on the fact that signed
979 overflow is undefined. */
982 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
984 gcc_assert (!flag_wrapv && !flag_trapv);
985 if (fold_deferring_overflow_warnings > 0)
987 if (fold_deferred_overflow_warning == NULL
988 || wc < fold_deferred_overflow_code)
990 fold_deferred_overflow_warning = gmsgid;
991 fold_deferred_overflow_code = wc;
994 else if (issue_strict_overflow_warning (wc))
995 warning (OPT_Wstrict_overflow, "%s", gmsgid);
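/* Usage sketch (illustrative): a caller that folds speculatively can
   bracket the work so that a deferred warning is only emitted when the
   folded result is actually used:

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   (STMT here stands for whatever statement the caller associates with
   the warning's location.)  */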
998 /* Return true if the built-in mathematical function specified by CODE
999 is odd, i.e. -f(x) == f(-x). */
1002 negate_mathfn_p (enum built_in_function code)
1006 CASE_FLT_FN (BUILT_IN_ASIN):
1007 CASE_FLT_FN (BUILT_IN_ASINH):
1008 CASE_FLT_FN (BUILT_IN_ATAN):
1009 CASE_FLT_FN (BUILT_IN_ATANH):
1010 CASE_FLT_FN (BUILT_IN_CBRT):
1011 CASE_FLT_FN (BUILT_IN_SIN):
1012 CASE_FLT_FN (BUILT_IN_SINH):
1013 CASE_FLT_FN (BUILT_IN_TAN):
1014 CASE_FLT_FN (BUILT_IN_TANH):
1023 /* Check whether we may negate an integer constant T without causing overflow. */
1027 may_negate_without_overflow_p (tree t)
1029 unsigned HOST_WIDE_INT val;
1033 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1035 type = TREE_TYPE (t);
1036 if (TYPE_UNSIGNED (type))
1039 prec = TYPE_PRECISION (type);
1040 if (prec > HOST_BITS_PER_WIDE_INT)
1042 if (TREE_INT_CST_LOW (t) != 0)
1044 prec -= HOST_BITS_PER_WIDE_INT;
1045 val = TREE_INT_CST_HIGH (t);
1048 val = TREE_INT_CST_LOW (t);
1049 if (prec < HOST_BITS_PER_WIDE_INT)
1050 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1051 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
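/* Example (illustrative): for a 32-bit signed type the only rejected
   constant is INT_MIN (0x80000000), whose negation is not
   representable; the comparison against 1 << (prec - 1) above encodes
   exactly that single excluded value.  */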
1054 /* Determine whether an expression T can be cheaply negated using
1055 the function negate_expr without introducing undefined overflow. */
1058 negate_expr_p (tree t)
1065 type = TREE_TYPE (t);
1067 STRIP_SIGN_NOPS (t);
1068 switch (TREE_CODE (t))
1071 if (TYPE_OVERFLOW_WRAPS (type))
1074 /* Check that -CST will not overflow type. */
1075 return may_negate_without_overflow_p (t);
1077 return (INTEGRAL_TYPE_P (type)
1078 && TYPE_OVERFLOW_WRAPS (type));
1085 return negate_expr_p (TREE_REALPART (t))
1086 && negate_expr_p (TREE_IMAGPART (t));
1089 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
1091 /* -(A + B) -> (-B) - A. */
1092 if (negate_expr_p (TREE_OPERAND (t, 1))
1093 && reorder_operands_p (TREE_OPERAND (t, 0),
1094 TREE_OPERAND (t, 1)))
1096 /* -(A + B) -> (-A) - B. */
1097 return negate_expr_p (TREE_OPERAND (t, 0));
1100 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1101 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1102 && reorder_operands_p (TREE_OPERAND (t, 0),
1103 TREE_OPERAND (t, 1));
1106 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1112 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1113 return negate_expr_p (TREE_OPERAND (t, 1))
1114 || negate_expr_p (TREE_OPERAND (t, 0));
1117 case TRUNC_DIV_EXPR:
1118 case ROUND_DIV_EXPR:
1119 case FLOOR_DIV_EXPR:
1121 case EXACT_DIV_EXPR:
1122 /* In general we can't negate A / B, because if A is INT_MIN and
1123 B is 1, we may turn this into INT_MIN / -1 which is undefined
1124 and actually traps on some architectures. But if overflow is
1125 undefined, we can negate, because - (INT_MIN / 1) is an overflow. */
1127 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1128 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1130 return negate_expr_p (TREE_OPERAND (t, 1))
1131 || negate_expr_p (TREE_OPERAND (t, 0));
1134 /* Negate -((double)float) as (double)(-float). */
1135 if (TREE_CODE (type) == REAL_TYPE)
1137 tree tem = strip_float_extensions (t);
1139 return negate_expr_p (tem);
1144 /* Negate -f(x) as f(-x). */
1145 if (negate_mathfn_p (builtin_mathfn_code (t)))
1146 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1150 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1151 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1153 tree op1 = TREE_OPERAND (t, 1);
1154 if (TREE_INT_CST_HIGH (op1) == 0
1155 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1156 == TREE_INT_CST_LOW (op1))
1167 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1168 simplification is possible.
1169 If negate_expr_p would return true for T, NULL_TREE will never be returned. */
1173 fold_negate_expr (tree t)
1175 tree type = TREE_TYPE (t);
1178 switch (TREE_CODE (t))
1180 /* Convert - (~A) to A + 1. */
1182 if (INTEGRAL_TYPE_P (type))
1183 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1184 build_int_cst (type, 1));
1188 tem = fold_negate_const (t, type);
1189 if (!TREE_OVERFLOW (tem)
1190 || !TYPE_OVERFLOW_TRAPS (type))
1195 tem = fold_negate_const (t, type);
1196 /* Two's complement FP formats, such as c4x, may overflow. */
1197 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1203 tree rpart = negate_expr (TREE_REALPART (t));
1204 tree ipart = negate_expr (TREE_IMAGPART (t));
1206 if ((TREE_CODE (rpart) == REAL_CST
1207 && TREE_CODE (ipart) == REAL_CST)
1208 || (TREE_CODE (rpart) == INTEGER_CST
1209 && TREE_CODE (ipart) == INTEGER_CST))
1210 return build_complex (type, rpart, ipart);
1215 return TREE_OPERAND (t, 0);
1218 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1220 /* -(A + B) -> (-B) - A. */
1221 if (negate_expr_p (TREE_OPERAND (t, 1))
1222 && reorder_operands_p (TREE_OPERAND (t, 0),
1223 TREE_OPERAND (t, 1)))
1225 tem = negate_expr (TREE_OPERAND (t, 1));
1226 return fold_build2 (MINUS_EXPR, type,
1227 tem, TREE_OPERAND (t, 0));
1230 /* -(A + B) -> (-A) - B. */
1231 if (negate_expr_p (TREE_OPERAND (t, 0)))
1233 tem = negate_expr (TREE_OPERAND (t, 0));
1234 return fold_build2 (MINUS_EXPR, type,
1235 tem, TREE_OPERAND (t, 1));
1241 /* - (A - B) -> B - A */
1242 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1243 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1244 return fold_build2 (MINUS_EXPR, type,
1245 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1249 if (TYPE_UNSIGNED (type))
1255 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1257 tem = TREE_OPERAND (t, 1);
1258 if (negate_expr_p (tem))
1259 return fold_build2 (TREE_CODE (t), type,
1260 TREE_OPERAND (t, 0), negate_expr (tem));
1261 tem = TREE_OPERAND (t, 0);
1262 if (negate_expr_p (tem))
1263 return fold_build2 (TREE_CODE (t), type,
1264 negate_expr (tem), TREE_OPERAND (t, 1));
1268 case TRUNC_DIV_EXPR:
1269 case ROUND_DIV_EXPR:
1270 case FLOOR_DIV_EXPR:
1272 case EXACT_DIV_EXPR:
1273 /* In general we can't negate A / B, because if A is INT_MIN and
1274 B is 1, we may turn this into INT_MIN / -1 which is undefined
1275 and actually traps on some architectures. But if overflow is
1276 undefined, we can negate, because - (INT_MIN / 1) is an overflow. */
1278 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1280 const char * const warnmsg = G_("assuming signed overflow does not "
1281 "occur when negating a division");
1282 tem = TREE_OPERAND (t, 1);
1283 if (negate_expr_p (tem))
1285 if (INTEGRAL_TYPE_P (type)
1286 && (TREE_CODE (tem) != INTEGER_CST
1287 || integer_onep (tem)))
1288 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1289 return fold_build2 (TREE_CODE (t), type,
1290 TREE_OPERAND (t, 0), negate_expr (tem));
1292 tem = TREE_OPERAND (t, 0);
1293 if (negate_expr_p (tem))
1295 if (INTEGRAL_TYPE_P (type)
1296 && (TREE_CODE (tem) != INTEGER_CST
1297 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1298 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1299 return fold_build2 (TREE_CODE (t), type,
1300 negate_expr (tem), TREE_OPERAND (t, 1));
1306 /* Convert -((double)float) into (double)(-float). */
1307 if (TREE_CODE (type) == REAL_TYPE)
1309 tem = strip_float_extensions (t);
1310 if (tem != t && negate_expr_p (tem))
1311 return negate_expr (tem);
1316 /* Negate -f(x) as f(-x). */
1317 if (negate_mathfn_p (builtin_mathfn_code (t))
1318 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1320 tree fndecl, arg, arglist;
1322 fndecl = get_callee_fndecl (t);
1323 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1324 arglist = build_tree_list (NULL_TREE, arg);
1325 return build_function_call_expr (fndecl, arglist);
1330 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1331 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1333 tree op1 = TREE_OPERAND (t, 1);
1334 if (TREE_INT_CST_HIGH (op1) == 0
1335 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1336 == TREE_INT_CST_LOW (op1))
1338 tree ntype = TYPE_UNSIGNED (type)
1339 ? lang_hooks.types.signed_type (type)
1340 : lang_hooks.types.unsigned_type (type);
1341 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1342 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1343 return fold_convert (type, temp);
1355 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1356 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1357 return NULL_TREE. */
1360 negate_expr (tree t)
1367 type = TREE_TYPE (t);
1368 STRIP_SIGN_NOPS (t);
1370 tem = fold_negate_expr (t);
1372 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1373 return fold_convert (type, tem);
1376 /* Split a tree IN into a constant, literal and variable parts that could be
1377 combined with CODE to make IN. "constant" means an expression with
1378 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1379 commutative arithmetic operation. Store the constant part into *CONP,
1380 the literal in *LITP and return the variable part. If a part isn't
1381 present, set it to null. If the tree does not decompose in this way,
1382 return the entire tree as the variable part and the other parts as null.
1384 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1385 case, we negate an operand that was subtracted, except if it is a
1386 literal, for which we use *MINUS_LITP instead.
1388 If NEGATE_P is true, we are negating all of IN, again except a literal
1389 for which we use *MINUS_LITP instead.
1391 If IN is itself a literal or constant, return it as appropriate.
1393 Note that we do not guarantee that any of the three values will be the
1394 same type as IN, but they will have the same signedness and mode. */
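/* Example (illustrative): splitting the tree for "a + 4" with CODE ==
   PLUS_EXPR returns the variable part "a", stores the literal 4 in
   *LITP and leaves *CONP and *MINUS_LITP null; splitting "a - 4"
   instead stores the 4 in *MINUS_LITP, recording that it was
   subtracted.  */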
1397 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1398 tree *minus_litp, int negate_p)
1406 /* Strip any conversions that don't change the machine mode or signedness. */
1407 STRIP_SIGN_NOPS (in);
1409 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1411 else if (TREE_CODE (in) == code
1412 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1413 /* We can associate addition and subtraction together (even
1414 though the C standard doesn't say so) for integers because
1415 the value is not affected. For reals, the value might be
1416 affected, so we can't. */
1417 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1418 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1420 tree op0 = TREE_OPERAND (in, 0);
1421 tree op1 = TREE_OPERAND (in, 1);
1422 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1423 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1425 /* First see if either of the operands is a literal, then a constant. */
1426 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1427 *litp = op0, op0 = 0;
1428 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1429 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1431 if (op0 != 0 && TREE_CONSTANT (op0))
1432 *conp = op0, op0 = 0;
1433 else if (op1 != 0 && TREE_CONSTANT (op1))
1434 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1436 /* If we haven't dealt with either operand, this is not a case we can
1437 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1438 if (op0 != 0 && op1 != 0)
1443 var = op1, neg_var_p = neg1_p;
1445 /* Now do any needed negations. */
1447 *minus_litp = *litp, *litp = 0;
1449 *conp = negate_expr (*conp);
1451 var = negate_expr (var);
1453 else if (TREE_CONSTANT (in))
1461 *minus_litp = *litp, *litp = 0;
1462 else if (*minus_litp)
1463 *litp = *minus_litp, *minus_litp = 0;
1464 *conp = negate_expr (*conp);
1465 var = negate_expr (var);
1471 /* Re-associate trees split by the above function. T1 and T2 are either
1472 expressions to associate or null. Return the new expression, if any. If
1473 we build an operation, do it in TYPE and with CODE. */
1476 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1483 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1484 try to fold this since we will have infinite recursion. But do
1485 deal with any NEGATE_EXPRs. */
1486 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1487 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1489 if (code == PLUS_EXPR)
1491 if (TREE_CODE (t1) == NEGATE_EXPR)
1492 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1493 fold_convert (type, TREE_OPERAND (t1, 0)));
1494 else if (TREE_CODE (t2) == NEGATE_EXPR)
1495 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1496 fold_convert (type, TREE_OPERAND (t2, 0)));
1497 else if (integer_zerop (t2))
1498 return fold_convert (type, t1);
1500 else if (code == MINUS_EXPR)
1502 if (integer_zerop (t2))
1503 return fold_convert (type, t1);
1506 return build2 (code, type, fold_convert (type, t1),
1507 fold_convert (type, t2));
1510 return fold_build2 (code, type, fold_convert (type, t1),
1511 fold_convert (type, t2));
1514 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1515 to produce a new constant. Return NULL_TREE if we don't know how
1516 to evaluate CODE at compile-time.
1518 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
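/* Example (illustrative): given INTEGER_CSTs holding 2 and 3,
   int_const_binop (PLUS_EXPR, arg1, arg2, 0) produces an INTEGER_CST
   holding 5 of the same type; with NOTRUNC == 0 the result is passed
   through force_fit_type, so an out-of-range value comes back with its
   overflow bits set.  */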
1521 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1523 unsigned HOST_WIDE_INT int1l, int2l;
1524 HOST_WIDE_INT int1h, int2h;
1525 unsigned HOST_WIDE_INT low;
1527 unsigned HOST_WIDE_INT garbagel;
1528 HOST_WIDE_INT garbageh;
1530 tree type = TREE_TYPE (arg1);
1531 int uns = TYPE_UNSIGNED (type);
1533 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1536 int1l = TREE_INT_CST_LOW (arg1);
1537 int1h = TREE_INT_CST_HIGH (arg1);
1538 int2l = TREE_INT_CST_LOW (arg2);
1539 int2h = TREE_INT_CST_HIGH (arg2);
1544 low = int1l | int2l, hi = int1h | int2h;
1548 low = int1l ^ int2l, hi = int1h ^ int2h;
1552 low = int1l & int2l, hi = int1h & int2h;
1558 /* It's unclear from the C standard whether shifts can overflow.
1559 The following code ignores overflow; perhaps a C standard
1560 interpretation ruling is needed. */
1561 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1568 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1573 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1577 neg_double (int2l, int2h, &low, &hi);
1578 add_double (int1l, int1h, low, hi, &low, &hi);
1579 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1583 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1586 case TRUNC_DIV_EXPR:
1587 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1588 case EXACT_DIV_EXPR:
1589 /* This is a shortcut for a common special case. */
1590 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1591 && ! TREE_CONSTANT_OVERFLOW (arg1)
1592 && ! TREE_CONSTANT_OVERFLOW (arg2)
1593 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1595 if (code == CEIL_DIV_EXPR)
1598 low = int1l / int2l, hi = 0;
1602 /* ... fall through ... */
1604 case ROUND_DIV_EXPR:
1605 if (int2h == 0 && int2l == 0)
1607 if (int2h == 0 && int2l == 1)
1609 low = int1l, hi = int1h;
1612 if (int1l == int2l && int1h == int2h
1613 && ! (int1l == 0 && int1h == 0))
1618 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1619 &low, &hi, &garbagel, &garbageh);
1622 case TRUNC_MOD_EXPR:
1623 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1624 /* This is a shortcut for a common special case. */
1625 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1626 && ! TREE_CONSTANT_OVERFLOW (arg1)
1627 && ! TREE_CONSTANT_OVERFLOW (arg2)
1628 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1630 if (code == CEIL_MOD_EXPR)
1632 low = int1l % int2l, hi = 0;
1636 /* ... fall through ... */
1638 case ROUND_MOD_EXPR:
1639 if (int2h == 0 && int2l == 0)
1641 overflow = div_and_round_double (code, uns,
1642 int1l, int1h, int2l, int2h,
1643 &garbagel, &garbageh, &low, &hi);
1649 low = (((unsigned HOST_WIDE_INT) int1h
1650 < (unsigned HOST_WIDE_INT) int2h)
1651 || (((unsigned HOST_WIDE_INT) int1h
1652 == (unsigned HOST_WIDE_INT) int2h)
1655 low = (int1h < int2h
1656 || (int1h == int2h && int1l < int2l));
1658 if (low == (code == MIN_EXPR))
1659 low = int1l, hi = int1h;
1661 low = int2l, hi = int2h;
1668 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1672 /* Propagate overflow flags ourselves. */
1673 if (((!uns || is_sizetype) && overflow)
1674 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1677 TREE_OVERFLOW (t) = 1;
1678 TREE_CONSTANT_OVERFLOW (t) = 1;
1680 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1683 TREE_CONSTANT_OVERFLOW (t) = 1;
1687 t = force_fit_type (t, 1,
1688 ((!uns || is_sizetype) && overflow)
1689 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1690 TREE_CONSTANT_OVERFLOW (arg1)
1691 | TREE_CONSTANT_OVERFLOW (arg2));
1696 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1697 constant. We assume ARG1 and ARG2 have the same data type, or at least
1698 are the same kind of constant and the same machine mode. Return zero if
1699 combining the constants is not allowed in the current operating mode.
1701 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1704 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1706 /* Sanity check for the recursive cases. */
1713 if (TREE_CODE (arg1) == INTEGER_CST)
1714 return int_const_binop (code, arg1, arg2, notrunc);
1716 if (TREE_CODE (arg1) == REAL_CST)
1718 enum machine_mode mode;
1721 REAL_VALUE_TYPE value;
1722 REAL_VALUE_TYPE result;
1726 /* The following codes are handled by real_arithmetic. */
1741 d1 = TREE_REAL_CST (arg1);
1742 d2 = TREE_REAL_CST (arg2);
1744 type = TREE_TYPE (arg1);
1745 mode = TYPE_MODE (type);
1747 /* Don't perform operation if we honor signaling NaNs and
1748 either operand is a NaN. */
1749 if (HONOR_SNANS (mode)
1750 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1753 /* Don't perform operation if it would raise a division
1754 by zero exception. */
1755 if (code == RDIV_EXPR
1756 && REAL_VALUES_EQUAL (d2, dconst0)
1757 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1760 /* If either operand is a NaN, just return it. Otherwise, set up
1761 for floating-point trap; we return an overflow. */
1762 if (REAL_VALUE_ISNAN (d1))
1764 else if (REAL_VALUE_ISNAN (d2))
1767 inexact = real_arithmetic (&value, code, &d1, &d2);
1768 real_convert (&result, mode, &value);
1770 /* Don't constant fold this floating point operation if
1771 the result has overflowed and flag_trapping_math is set. */
1772 if (flag_trapping_math
1773 && MODE_HAS_INFINITIES (mode)
1774 && REAL_VALUE_ISINF (result)
1775 && !REAL_VALUE_ISINF (d1)
1776 && !REAL_VALUE_ISINF (d2))
1779 /* Don't constant fold this floating point operation if the
1780 result may depend upon the run-time rounding mode and
1781 flag_rounding_math is set, or if GCC's software emulation
1782 is unable to accurately represent the result. */
1783 if ((flag_rounding_math
1784 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1785 && !flag_unsafe_math_optimizations))
1786 && (inexact || !real_identical (&result, &value)))
1789 t = build_real (type, result);
1791 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1792 TREE_CONSTANT_OVERFLOW (t)
1794 | TREE_CONSTANT_OVERFLOW (arg1)
1795 | TREE_CONSTANT_OVERFLOW (arg2);
1799 if (TREE_CODE (arg1) == COMPLEX_CST)
1801 tree type = TREE_TYPE (arg1);
1802 tree r1 = TREE_REALPART (arg1);
1803 tree i1 = TREE_IMAGPART (arg1);
1804 tree r2 = TREE_REALPART (arg2);
1805 tree i2 = TREE_IMAGPART (arg2);
1812 real = const_binop (code, r1, r2, notrunc);
1813 imag = const_binop (code, i1, i2, notrunc);
1817 real = const_binop (MINUS_EXPR,
1818 const_binop (MULT_EXPR, r1, r2, notrunc),
1819 const_binop (MULT_EXPR, i1, i2, notrunc),
1821 imag = const_binop (PLUS_EXPR,
1822 const_binop (MULT_EXPR, r1, i2, notrunc),
1823 const_binop (MULT_EXPR, i1, r2, notrunc),
1830 = const_binop (PLUS_EXPR,
1831 const_binop (MULT_EXPR, r2, r2, notrunc),
1832 const_binop (MULT_EXPR, i2, i2, notrunc),
1835 = const_binop (PLUS_EXPR,
1836 const_binop (MULT_EXPR, r1, r2, notrunc),
1837 const_binop (MULT_EXPR, i1, i2, notrunc),
1840 = const_binop (MINUS_EXPR,
1841 const_binop (MULT_EXPR, i1, r2, notrunc),
1842 const_binop (MULT_EXPR, r1, i2, notrunc),
1845 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1846 code = TRUNC_DIV_EXPR;
1848 real = const_binop (code, t1, magsquared, notrunc);
1849 imag = const_binop (code, t2, magsquared, notrunc);
1858 return build_complex (type, real, imag);
1864 /* Create a size type INTEGER_CST node with NUMBER sign extended. KIND
1865 indicates which particular sizetype to create. */
1868 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1870 return build_int_cst (sizetype_tab[(int) kind], number);
1873 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1874 is a tree code. The type of the result is taken from the operands.
1875 Both must be the same integer type, and it must be a size type.
1876 If the operands are constant, so is the result. */
1879 size_binop (enum tree_code code, tree arg0, tree arg1)
1881 tree type = TREE_TYPE (arg0);
1883 if (arg0 == error_mark_node || arg1 == error_mark_node)
1884 return error_mark_node;
1886 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1887 && type == TREE_TYPE (arg1));
1889 /* Handle the special case of two integer constants faster. */
1890 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1892 /* And some specific cases even faster than that. */
1893 if (code == PLUS_EXPR && integer_zerop (arg0))
1895 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1896 && integer_zerop (arg1))
1898 else if (code == MULT_EXPR && integer_onep (arg0))
1901 /* Handle general case of two integer constants. */
1902 return int_const_binop (code, arg0, arg1, 0);
1905 return fold_build2 (code, type, arg0, arg1);
1908 /* Given two values, either both of sizetype or both of bitsizetype,
1909 compute the difference between the two values. Return the value
1910 in the signed type corresponding to the type of the operands. */
1913 size_diffop (tree arg0, tree arg1)
1915 tree type = TREE_TYPE (arg0);
1918 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1919 && type == TREE_TYPE (arg1));
1921 /* If the type is already signed, just do the simple thing. */
1922 if (!TYPE_UNSIGNED (type))
1923 return size_binop (MINUS_EXPR, arg0, arg1);
1925 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1927 /* If either operand is not a constant, do the conversions to the signed
1928 type and subtract. The hardware will do the right thing with any
1929 overflow in the subtraction. */
1930 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1931 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1932 fold_convert (ctype, arg1));
1934 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1935 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1936 overflow) and negate (which can't either). Special-case a result
1937 of zero while we're here. */
1938 if (tree_int_cst_equal (arg0, arg1))
1939 return build_int_cst (ctype, 0);
1940 else if (tree_int_cst_lt (arg1, arg0))
1941 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1943 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1944 fold_convert (ctype, size_binop (MINUS_EXPR,
1948 /* A subroutine of fold_convert_const handling conversions of an
1949 INTEGER_CST to another integer type. */
1952 fold_convert_const_int_from_int (tree type, tree arg1)
1956 /* Given an integer constant, make a new constant with the new type,
1957 appropriately sign-extended or truncated. */
1958 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1959 TREE_INT_CST_HIGH (arg1));
1961 t = force_fit_type (t,
1962 /* Don't set the overflow when
1963 converting a pointer */
1964 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1965 (TREE_INT_CST_HIGH (arg1) < 0
1966 && (TYPE_UNSIGNED (type)
1967 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1968 | TREE_OVERFLOW (arg1),
1969 TREE_CONSTANT_OVERFLOW (arg1));
1974 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1975 to an integer type. */
1978 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1983 /* The following code implements the floating point to integer
1984 conversion rules required by the Java Language Specification,
1985 that IEEE NaNs are mapped to zero and values that overflow
1986 the target precision saturate, i.e. values greater than
1987 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1988 are mapped to INT_MIN. These semantics are allowed by the
1989 C and C++ standards that simply state that the behavior of
1990 FP-to-integer conversion is unspecified upon overflow. */
1992 HOST_WIDE_INT high, low;
1994 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1998 case FIX_TRUNC_EXPR:
1999 real_trunc (&r, VOIDmode, &x);
2003 real_ceil (&r, VOIDmode, &x);
2006 case FIX_FLOOR_EXPR:
2007 real_floor (&r, VOIDmode, &x);
2010 case FIX_ROUND_EXPR:
2011 real_round (&r, VOIDmode, &x);
2018 /* If R is NaN, return zero and show we have an overflow. */
2019 if (REAL_VALUE_ISNAN (r))
2026 /* See if R is less than the lower bound or greater than the upper bound. */
2031 tree lt = TYPE_MIN_VALUE (type);
2032 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2033 if (REAL_VALUES_LESS (r, l))
2036 high = TREE_INT_CST_HIGH (lt);
2037 low = TREE_INT_CST_LOW (lt);
2043 tree ut = TYPE_MAX_VALUE (type);
2046 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047 if (REAL_VALUES_LESS (u, r))
2050 high = TREE_INT_CST_HIGH (ut);
2051 low = TREE_INT_CST_LOW (ut);
2057 REAL_VALUE_TO_INT (&low, &high, r);
2059 t = build_int_cst_wide (type, low, high);
2061 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
2062 TREE_CONSTANT_OVERFLOW (arg1));
2066 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2067 to another floating point type. */
2070 fold_convert_const_real_from_real (tree type, tree arg1)
2072 REAL_VALUE_TYPE value;
2075 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2076 t = build_real (type, value);
2078 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2079 TREE_CONSTANT_OVERFLOW (t)
2080 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2084 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2085 type TYPE. If no simplification can be done return NULL_TREE. */
2088 fold_convert_const (enum tree_code code, tree type, tree arg1)
2090 if (TREE_TYPE (arg1) == type)
2093 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2095 if (TREE_CODE (arg1) == INTEGER_CST)
2096 return fold_convert_const_int_from_int (type, arg1);
2097 else if (TREE_CODE (arg1) == REAL_CST)
2098 return fold_convert_const_int_from_real (code, type, arg1);
2100 else if (TREE_CODE (type) == REAL_TYPE)
2102 if (TREE_CODE (arg1) == INTEGER_CST)
2103 return build_real_from_int_cst (type, arg1);
2104 if (TREE_CODE (arg1) == REAL_CST)
2105 return fold_convert_const_real_from_real (type, arg1);
2110 /* Construct a vector of zero elements of vector type TYPE. */
2113 build_zero_vector (tree type)
2118 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2119 units = TYPE_VECTOR_SUBPARTS (type);
2122 for (i = 0; i < units; i++)
2123 list = tree_cons (NULL_TREE, elem, list);
2124 return build_vector (type, list);
2127 /* Convert expression ARG to type TYPE. Used by the middle-end for
2128 simple conversions in preference to calling the front-end's convert. */
2131 fold_convert (tree type, tree arg)
2133 tree orig = TREE_TYPE (arg);
2139 if (TREE_CODE (arg) == ERROR_MARK
2140 || TREE_CODE (type) == ERROR_MARK
2141 || TREE_CODE (orig) == ERROR_MARK)
2142 return error_mark_node;
2144 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2145 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2146 TYPE_MAIN_VARIANT (orig)))
2147 return fold_build1 (NOP_EXPR, type, arg);
2149 switch (TREE_CODE (type))
2151 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2152 case POINTER_TYPE: case REFERENCE_TYPE:
2153 /* APPLE LOCAL blocks 5862465 */
2154 case BLOCK_POINTER_TYPE:
2156 if (TREE_CODE (arg) == INTEGER_CST)
2158 tem = fold_convert_const (NOP_EXPR, type, arg);
2159 if (tem != NULL_TREE)
2162 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2163 || TREE_CODE (orig) == OFFSET_TYPE)
2164 return fold_build1 (NOP_EXPR, type, arg);
2165 if (TREE_CODE (orig) == COMPLEX_TYPE)
2167 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2168 return fold_convert (type, tem);
2170 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2171 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2172 return fold_build1 (NOP_EXPR, type, arg);
2175 if (TREE_CODE (arg) == INTEGER_CST)
2177 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2178 if (tem != NULL_TREE)
2181 else if (TREE_CODE (arg) == REAL_CST)
2183 tem = fold_convert_const (NOP_EXPR, type, arg);
2184 if (tem != NULL_TREE)
2188 switch (TREE_CODE (orig))
2191 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2192 case POINTER_TYPE: case REFERENCE_TYPE:
2193 return fold_build1 (FLOAT_EXPR, type, arg);
2196 return fold_build1 (NOP_EXPR, type, arg);
2199 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2200 return fold_convert (type, tem);
2207 switch (TREE_CODE (orig))
2210 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2211 case POINTER_TYPE: case REFERENCE_TYPE:
2213 return build2 (COMPLEX_EXPR, type,
2214 fold_convert (TREE_TYPE (type), arg),
2215 fold_convert (TREE_TYPE (type), integer_zero_node));
2220 if (TREE_CODE (arg) == COMPLEX_EXPR)
2222 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2223 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2224 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2227 arg = save_expr (arg);
2228 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2229 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2230 rpart = fold_convert (TREE_TYPE (type), rpart);
2231 ipart = fold_convert (TREE_TYPE (type), ipart);
2232 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2240 if (integer_zerop (arg))
2241 return build_zero_vector (type);
2242 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2243 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2244 || TREE_CODE (orig) == VECTOR_TYPE);
2245 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2248 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2255 /* Return false if expr can be assumed not to be an lvalue, true otherwise. */
2259 maybe_lvalue_p (tree x)
2261 /* We only need to wrap lvalue tree codes. */
2262 switch (TREE_CODE (x))
2273 case ALIGN_INDIRECT_REF:
2274 case MISALIGNED_INDIRECT_REF:
2276 case ARRAY_RANGE_REF:
2282 case PREINCREMENT_EXPR:
2283 case PREDECREMENT_EXPR:
2285 case TRY_CATCH_EXPR:
2286 case WITH_CLEANUP_EXPR:
2297 /* Assume the worst for front-end tree codes. */
2298 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2306 /* Return an expr equal to X but certainly not valid as an lvalue. */
2311 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us. */
2316 if (! maybe_lvalue_p (x))
2318 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2321 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2322 Zero means allow extended lvalues. */
2324 int pedantic_lvalues;
2326 /* When pedantic, return an expr equal to X but certainly not valid as a
2327 pedantic lvalue. Otherwise, return X. */
2330 pedantic_non_lvalue (tree x)
2332 if (pedantic_lvalues)
2333 return non_lvalue (x);
2338 /* Given a tree comparison code, return the code that is the logical inverse
2339 of the given code. It is not safe to do this for floating-point
2340 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2341 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2344 invert_tree_comparison (enum tree_code code, bool honor_nans)
2346 if (honor_nans && flag_trapping_math)
2356 return honor_nans ? UNLE_EXPR : LE_EXPR;
2358 return honor_nans ? UNLT_EXPR : LT_EXPR;
2360 return honor_nans ? UNGE_EXPR : GE_EXPR;
2362 return honor_nans ? UNGT_EXPR : GT_EXPR;
2376 return UNORDERED_EXPR;
2377 case UNORDERED_EXPR:
2378 return ORDERED_EXPR;
2384 /* Similar, but return the comparison that results if the operands are
2385 swapped. This is safe for floating-point. */
2388 swap_tree_comparison (enum tree_code code)
2395 case UNORDERED_EXPR:
2421 /* Convert a comparison tree code from an enum tree_code representation
2422 into a compcode bit-based encoding. This function is the inverse of
2423 compcode_to_comparison. */
2425 static enum comparison_code
2426 comparison_to_compcode (enum tree_code code)
2443 return COMPCODE_ORD;
2444 case UNORDERED_EXPR:
2445 return COMPCODE_UNORD;
2447 return COMPCODE_UNLT;
2449 return COMPCODE_UNEQ;
2451 return COMPCODE_UNLE;
2453 return COMPCODE_UNGT;
2455 return COMPCODE_LTGT;
2457 return COMPCODE_UNGE;
2463 /* Convert a compcode bit-based encoding of a comparison operator back
2464 to GCC's enum tree_code representation. This function is the
2465 inverse of comparison_to_compcode. */
2467 static enum tree_code
2468 compcode_to_comparison (enum comparison_code code)
2485 return ORDERED_EXPR;
2486 case COMPCODE_UNORD:
2487 return UNORDERED_EXPR;
2505 /* Return a tree for the comparison which is the combination of
2506 doing the AND or OR (depending on CODE) of the two operations LCODE
2507 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2508 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2509 if this makes the transformation invalid. */
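/* Editorial note: a worked example of the bit-based encoding.  Assuming
   the comparison_code values declared earlier, where
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, the expression

     (a < b) || (a == b)

   reaches this function as lcode == LT_EXPR, rcode == EQ_EXPR and
   code == TRUTH_ORIF_EXPR, and ORing the compcodes folds it to the
   single comparison a <= b, provided the NaN and trap checks below
   allow the transformation.  */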
2512 combine_comparisons (enum tree_code code, enum tree_code lcode,
2513 enum tree_code rcode, tree truth_type,
2514 tree ll_arg, tree lr_arg)
2516 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2517 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2518 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2519 enum comparison_code compcode;
2523 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2524 compcode = lcompcode & rcompcode;
2527 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2528 compcode = lcompcode | rcompcode;
2537 /* Eliminate unordered comparisons, as well as LTGT and ORD
2538 which are not used unless the mode has NaNs. */
2539 compcode &= ~COMPCODE_UNORD;
2540 if (compcode == COMPCODE_LTGT)
2541 compcode = COMPCODE_NE;
2542 else if (compcode == COMPCODE_ORD)
2543 compcode = COMPCODE_TRUE;
2545 else if (flag_trapping_math)
2547 /* Check that the original operation and the optimized ones will trap
2548 under the same condition. */
2549 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2550 && (lcompcode != COMPCODE_EQ)
2551 && (lcompcode != COMPCODE_ORD);
2552 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2553 && (rcompcode != COMPCODE_EQ)
2554 && (rcompcode != COMPCODE_ORD);
2555 bool trap = (compcode & COMPCODE_UNORD) == 0
2556 && (compcode != COMPCODE_EQ)
2557 && (compcode != COMPCODE_ORD);
2559 /* In a short-circuited boolean expression the LHS might be
2560 such that the RHS, if evaluated, will never trap. For
2561 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2562 if neither x nor y is NaN. (This is a mixed blessing: for
2563 example, the expression above will never trap, hence
2564 optimizing it to x < y would be invalid). */
2565 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2566 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2569 /* If the comparison was short-circuited, and only the RHS
2570 trapped, we may now generate a spurious trap. */
2572 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2575 /* If we changed the conditions that cause a trap, we lose. */
2576 if ((ltrap || rtrap) != trap)
2580 if (compcode == COMPCODE_TRUE)
2581 return constant_boolean_node (true, truth_type);
2582 else if (compcode == COMPCODE_FALSE)
2583 return constant_boolean_node (false, truth_type);
2585 return fold_build2 (compcode_to_comparison (compcode),
2586 truth_type, ll_arg, lr_arg);
2589 /* Return nonzero if CODE is a tree code that represents a truth value. */
2592 truth_value_p (enum tree_code code)
2594 return (TREE_CODE_CLASS (code) == tcc_comparison
2595 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2596 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2597 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2600 /* Return nonzero if two operands (typically of the same tree node)
2601 are necessarily equal. If either argument has side-effects this
2602 function returns zero. FLAGS modifies behavior as follows:
2604 If OEP_ONLY_CONST is set, only return nonzero for constants.
2605 This function tests whether the operands are indistinguishable;
2606 it does not test whether they are equal using C's == operation.
2607 The distinction is important for IEEE floating point, because
2608 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2609 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2611 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2612 even though it may hold multiple values during a function.
2613 This is because a GCC tree node guarantees that nothing else is
2614 executed between the evaluation of its "operands" (which may often
2615 be evaluated in arbitrary order). Hence if the operands themselves
2616 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2617 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2618 unset means assuming isochronic (or instantaneous) tree equivalence.
2619 Unless comparing arbitrary expression trees, such as from different
2620 statements, this flag can usually be left unset.
2622 If OEP_PURE_SAME is set, then pure functions with identical arguments
2623 are considered the same. It is used when the caller has other ways
2624 to ensure that global memory is unchanged in between. */
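/* Editorial note: two examples of the semantics above.  For a simple
   VAR_DECL x, operand_equal_p (x, x, 0) returns nonzero, while comparing
   the REAL_CSTs -0.0 and 0.0 returns zero, because REAL_VALUES_IDENTICAL
   distinguishes them exactly as the IEEE caveat above requires.  */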
2627 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2629 /* If either is ERROR_MARK, they aren't equal. */
2630 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2633 /* If the two types don't have the same signedness, then we can't consider
2634 them equal. We must check this before the STRIP_NOPS calls
2635 because they may change the signedness of the arguments. */
2636 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2639 /* If the two types don't have the same precision, then it is not safe to strip NOPs. */
2641 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2647 /* In case both args are comparisons but with different comparison
2648 code, try to swap the comparison operands of one arg to produce
2649 a match and compare that variant. */
2650 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2651 && COMPARISON_CLASS_P (arg0)
2652 && COMPARISON_CLASS_P (arg1))
2654 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2656 if (TREE_CODE (arg0) == swap_code)
2657 return operand_equal_p (TREE_OPERAND (arg0, 0),
2658 TREE_OPERAND (arg1, 1), flags)
2659 && operand_equal_p (TREE_OPERAND (arg0, 1),
2660 TREE_OPERAND (arg1, 0), flags);
2663 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2664 /* This is needed for conversions and for COMPONENT_REF.
2665 Might as well play it safe and always test this. */
2666 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2667 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2668 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2671 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2672 We don't care about side effects in that case because the SAVE_EXPR
2673 takes care of that for us. In all other cases, two expressions are
2674 equal if they have no side effects. If we have two identical
2675 expressions with side effects that should be treated the same due
2676 to the only side effects being identical SAVE_EXPR's, that will
2677 be detected in the recursive calls below. */
2678 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2679 && (TREE_CODE (arg0) == SAVE_EXPR
2680 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2683 /* Next handle constant cases, those for which we can return 1 even
2684 if ONLY_CONST is set. */
2685 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2686 switch (TREE_CODE (arg0))
2689 return (! TREE_CONSTANT_OVERFLOW (arg0)
2690 && ! TREE_CONSTANT_OVERFLOW (arg1)
2691 && tree_int_cst_equal (arg0, arg1));
2694 return (! TREE_CONSTANT_OVERFLOW (arg0)
2695 && ! TREE_CONSTANT_OVERFLOW (arg1)
2696 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2697 TREE_REAL_CST (arg1)));
2703 if (TREE_CONSTANT_OVERFLOW (arg0)
2704 || TREE_CONSTANT_OVERFLOW (arg1))
2707 v1 = TREE_VECTOR_CST_ELTS (arg0);
2708 v2 = TREE_VECTOR_CST_ELTS (arg1);
2711 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2714 v1 = TREE_CHAIN (v1);
2715 v2 = TREE_CHAIN (v2);
2722 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2724 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2728 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2729 && ! memcmp (TREE_STRING_POINTER (arg0),
2730 TREE_STRING_POINTER (arg1),
2731 TREE_STRING_LENGTH (arg0)));
2734 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2740 if (flags & OEP_ONLY_CONST)
2743 /* Define macros to test an operand from arg0 and arg1 for equality and a
2744 variant that allows null and views null as being different from any
2745 non-null value. In the latter case, if either is null, then both
2746 must be; otherwise, do the normal comparison. */
2747 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2748 TREE_OPERAND (arg1, N), flags)
2750 #define OP_SAME_WITH_NULL(N) \
2751 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2752 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2754 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2757 /* Two conversions are equal only if signedness and modes match. */
2758 switch (TREE_CODE (arg0))
2763 case FIX_TRUNC_EXPR:
2764 case FIX_FLOOR_EXPR:
2765 case FIX_ROUND_EXPR:
2766 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2767 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2777 case tcc_comparison:
2779 if (OP_SAME (0) && OP_SAME (1))
2782 /* For commutative ops, allow the other order. */
2783 return (commutative_tree_code (TREE_CODE (arg0))
2784 && operand_equal_p (TREE_OPERAND (arg0, 0),
2785 TREE_OPERAND (arg1, 1), flags)
2786 && operand_equal_p (TREE_OPERAND (arg0, 1),
2787 TREE_OPERAND (arg1, 0), flags));
2790 /* If either of the pointer (or reference) expressions we are
2791 dereferencing contain a side effect, these cannot be equal. */
2792 if (TREE_SIDE_EFFECTS (arg0)
2793 || TREE_SIDE_EFFECTS (arg1))
2796 switch (TREE_CODE (arg0))
2799 case ALIGN_INDIRECT_REF:
2800 case MISALIGNED_INDIRECT_REF:
2806 case ARRAY_RANGE_REF:
2807 /* Operands 2 and 3 may be null.
2808 Compare the array index by value first if it is constant, as we
2809 may have different types but the same value here. */
2811 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2812 TREE_OPERAND (arg1, 1))
2814 && OP_SAME_WITH_NULL (2)
2815 && OP_SAME_WITH_NULL (3));
2818 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2819 may be NULL when we're called to compare MEM_EXPRs. */
2820 return OP_SAME_WITH_NULL (0)
2822 && OP_SAME_WITH_NULL (2);
2825 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2831 case tcc_expression:
2832 switch (TREE_CODE (arg0))
2835 case TRUTH_NOT_EXPR:
2838 case TRUTH_ANDIF_EXPR:
2839 case TRUTH_ORIF_EXPR:
2840 return OP_SAME (0) && OP_SAME (1);
2842 case TRUTH_AND_EXPR:
2844 case TRUTH_XOR_EXPR:
2845 if (OP_SAME (0) && OP_SAME (1))
2848 /* Otherwise take into account this is a commutative operation. */
2849 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2850 TREE_OPERAND (arg1, 1), flags)
2851 && operand_equal_p (TREE_OPERAND (arg0, 1),
2852 TREE_OPERAND (arg1, 0), flags));
2855 /* If the CALL_EXPRs call different functions, then they
2856 clearly can not be equal. */
2861 unsigned int cef = call_expr_flags (arg0);
2862 if (flags & OEP_PURE_SAME)
2863 cef &= ECF_CONST | ECF_PURE;
2870 /* Now see if all the arguments are the same. operand_equal_p
2871 does not handle TREE_LIST, so we walk the operands here
2872 feeding them to operand_equal_p. */
2873 arg0 = TREE_OPERAND (arg0, 1);
2874 arg1 = TREE_OPERAND (arg1, 1);
2875 while (arg0 && arg1)
2877 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2881 arg0 = TREE_CHAIN (arg0);
2882 arg1 = TREE_CHAIN (arg1);
2885 /* If we get here and both argument lists are exhausted
2886 then the CALL_EXPRs are equal. */
2887 return ! (arg0 || arg1);
2893 case tcc_declaration:
2894 /* Consider __builtin_sqrt equal to sqrt. */
2895 return (TREE_CODE (arg0) == FUNCTION_DECL
2896 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2897 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2898 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2905 #undef OP_SAME_WITH_NULL
2908 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2909 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2911 When in doubt, return 0. */
2914 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2916 int unsignedp1, unsignedpo;
2917 tree primarg0, primarg1, primother;
2918 unsigned int correct_width;
2920 if (operand_equal_p (arg0, arg1, 0))
2923 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2924 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2927 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2928 and see if the inner values are the same. This removes any
2929 signedness comparison, which doesn't matter here. */
2930 primarg0 = arg0, primarg1 = arg1;
2931 STRIP_NOPS (primarg0);
2932 STRIP_NOPS (primarg1);
2933 if (operand_equal_p (primarg0, primarg1, 0))
2936 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2937 actual comparison operand, ARG0.
2939 First throw away any conversions to wider types
2940 already present in the operands. */
2942 primarg1 = get_narrower (arg1, &unsignedp1);
2943 primother = get_narrower (other, &unsignedpo);
2945 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2946 if (unsignedp1 == unsignedpo
2947 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2948 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2950 tree type = TREE_TYPE (arg0);
2952 /* Make sure shorter operand is extended the right way
2953 to match the longer operand. */
2954 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2955 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2957 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2964 /* See if ARG is an expression that is either a comparison or is performing
2965 arithmetic on comparisons. The comparisons must only be comparing
2966 two different values, which will be stored in *CVAL1 and *CVAL2; if
2967 they are nonzero it means that some operands have already been found.
2968 No variables may be used anywhere else in the expression except in the
2969 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2970 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2972 If this is true, return 1. Otherwise, return zero. */
2975 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2977 enum tree_code code = TREE_CODE (arg);
2978 enum tree_code_class class = TREE_CODE_CLASS (code);
2980 /* We can handle some of the tcc_expression cases here. */
2981 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2983 else if (class == tcc_expression
2984 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2985 || code == COMPOUND_EXPR))
2988 else if (class == tcc_expression && code == SAVE_EXPR
2989 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2991 /* If we've already found a CVAL1 or CVAL2, this expression is
2992 too complex to handle. */
2993 if (*cval1 || *cval2)
3003 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3006 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3007 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3008 cval1, cval2, save_p));
3013 case tcc_expression:
3014 if (code == COND_EXPR)
3015 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3016 cval1, cval2, save_p)
3017 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3018 cval1, cval2, save_p)
3019 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3020 cval1, cval2, save_p));
3023 case tcc_comparison:
3024 /* First see if we can handle the first operand, then the second. For
3025 the second operand, we know *CVAL1 can't be zero. It must be that
3026 one side of the comparison is each of the values; test for the
3027 case where this isn't true by failing if the two operands are the same. */
3030 if (operand_equal_p (TREE_OPERAND (arg, 0),
3031 TREE_OPERAND (arg, 1), 0))
3035 *cval1 = TREE_OPERAND (arg, 0);
3036 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3038 else if (*cval2 == 0)
3039 *cval2 = TREE_OPERAND (arg, 0);
3040 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3045 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3047 else if (*cval2 == 0)
3048 *cval2 = TREE_OPERAND (arg, 1);
3049 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3061 /* ARG is a tree that is known to contain just arithmetic operations and
3062 comparisons. Evaluate the operations in the tree substituting NEW0 for
3063 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
3067 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3069 tree type = TREE_TYPE (arg);
3070 enum tree_code code = TREE_CODE (arg);
3071 enum tree_code_class class = TREE_CODE_CLASS (code);
3073 /* We can handle some of the tcc_expression cases here. */
3074 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3076 else if (class == tcc_expression
3077 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3083 return fold_build1 (code, type,
3084 eval_subst (TREE_OPERAND (arg, 0),
3085 old0, new0, old1, new1));
3088 return fold_build2 (code, type,
3089 eval_subst (TREE_OPERAND (arg, 0),
3090 old0, new0, old1, new1),
3091 eval_subst (TREE_OPERAND (arg, 1),
3092 old0, new0, old1, new1));
3094 case tcc_expression:
3098 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3101 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3104 return fold_build3 (code, type,
3105 eval_subst (TREE_OPERAND (arg, 0),
3106 old0, new0, old1, new1),
3107 eval_subst (TREE_OPERAND (arg, 1),
3108 old0, new0, old1, new1),
3109 eval_subst (TREE_OPERAND (arg, 2),
3110 old0, new0, old1, new1));
3114 /* Fall through - ??? */
3116 case tcc_comparison:
3118 tree arg0 = TREE_OPERAND (arg, 0);
3119 tree arg1 = TREE_OPERAND (arg, 1);
3121 /* We need to check both for exact equality and tree equality. The
3122 former will be true if the operand has a side-effect. In that
3123 case, we know the operand occurred exactly once. */
3125 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3127 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3130 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3132 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3135 return fold_build2 (code, type, arg0, arg1);
3143 /* Return a tree for the case when the result of an expression is RESULT
3144 converted to TYPE and OMITTED was previously an operand of the expression
3145 but is now not needed (e.g., we folded OMITTED * 0).
3147 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3148 the conversion of RESULT to TYPE. */
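/* Editorial note: for example, when fold simplifies f () * 0 the call
   f () is the OMITTED operand; it has side effects, so the result is
   COMPOUND_EXPR <f (), 0> and the call is still evaluated, whereas x * 0
   for a plain variable x simply becomes non_lvalue (0).  */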
3151 omit_one_operand (tree type, tree result, tree omitted)
3153 tree t = fold_convert (type, result);
3155 if (TREE_SIDE_EFFECTS (omitted))
3156 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3158 return non_lvalue (t);
3161 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3164 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3166 tree t = fold_convert (type, result);
3168 if (TREE_SIDE_EFFECTS (omitted))
3169 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3171 return pedantic_non_lvalue (t);
3174 /* Return a tree for the case when the result of an expression is RESULT
3175 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3176 of the expression but are now not needed.
3178 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3179 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3180 evaluated before OMITTED2. Otherwise, if neither has side effects,
3181 just do the conversion of RESULT to TYPE. */
3184 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3186 tree t = fold_convert (type, result);
3188 if (TREE_SIDE_EFFECTS (omitted2))
3189 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3190 if (TREE_SIDE_EFFECTS (omitted1))
3191 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3193 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3197 /* Return a simplified tree node for the truth-negation of ARG. This
3198 never alters ARG itself. We assume that ARG is an operation that
3199 returns a truth value (0 or 1).
3201 FIXME: one would think we would fold the result, but it causes
3202 problems with the dominator optimizer. */
3205 fold_truth_not_expr (tree arg)
3207 tree type = TREE_TYPE (arg);
3208 enum tree_code code = TREE_CODE (arg);
3210 /* If this is a comparison, we can simply invert it, except for
3211 floating-point non-equality comparisons, in which case we just
3212 enclose a TRUTH_NOT_EXPR around what we have. */
3214 if (TREE_CODE_CLASS (code) == tcc_comparison)
3216 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3217 if (FLOAT_TYPE_P (op_type)
3218 && flag_trapping_math
3219 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3220 && code != NE_EXPR && code != EQ_EXPR)
3224 code = invert_tree_comparison (code,
3225 HONOR_NANS (TYPE_MODE (op_type)));
3226 if (code == ERROR_MARK)
3229 return build2 (code, type,
3230 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3237 return constant_boolean_node (integer_zerop (arg), type);
3239 case TRUTH_AND_EXPR:
3240 return build2 (TRUTH_OR_EXPR, type,
3241 invert_truthvalue (TREE_OPERAND (arg, 0)),
3242 invert_truthvalue (TREE_OPERAND (arg, 1)));
3245 return build2 (TRUTH_AND_EXPR, type,
3246 invert_truthvalue (TREE_OPERAND (arg, 0)),
3247 invert_truthvalue (TREE_OPERAND (arg, 1)));
3249 case TRUTH_XOR_EXPR:
3250 /* Here we can invert either operand. We invert the first operand
3251 unless the second operand is a TRUTH_NOT_EXPR in which case our
3252 result is the XOR of the first operand with the inside of the
3253 negation of the second operand. */
3255 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3256 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3257 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3259 return build2 (TRUTH_XOR_EXPR, type,
3260 invert_truthvalue (TREE_OPERAND (arg, 0)),
3261 TREE_OPERAND (arg, 1));
3263 case TRUTH_ANDIF_EXPR:
3264 return build2 (TRUTH_ORIF_EXPR, type,
3265 invert_truthvalue (TREE_OPERAND (arg, 0)),
3266 invert_truthvalue (TREE_OPERAND (arg, 1)));
3268 case TRUTH_ORIF_EXPR:
3269 return build2 (TRUTH_ANDIF_EXPR, type,
3270 invert_truthvalue (TREE_OPERAND (arg, 0)),
3271 invert_truthvalue (TREE_OPERAND (arg, 1)));
3273 case TRUTH_NOT_EXPR:
3274 return TREE_OPERAND (arg, 0);
3278 tree arg1 = TREE_OPERAND (arg, 1);
3279 tree arg2 = TREE_OPERAND (arg, 2);
3280 /* A COND_EXPR may have a throw as one operand, which
3281 then has void type. Just leave void operands as they are. */
3283 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3284 VOID_TYPE_P (TREE_TYPE (arg1))
3285 ? arg1 : invert_truthvalue (arg1),
3286 VOID_TYPE_P (TREE_TYPE (arg2))
3287 ? arg2 : invert_truthvalue (arg2));
3291 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3292 invert_truthvalue (TREE_OPERAND (arg, 1)));
3294 case NON_LVALUE_EXPR:
3295 return invert_truthvalue (TREE_OPERAND (arg, 0));
3298 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3299 return build1 (TRUTH_NOT_EXPR, type, arg);
3303 return build1 (TREE_CODE (arg), type,
3304 invert_truthvalue (TREE_OPERAND (arg, 0)));
3307 if (!integer_onep (TREE_OPERAND (arg, 1)))
3309 return build2 (EQ_EXPR, type, arg,
3310 build_int_cst (type, 0));
3313 return build1 (TRUTH_NOT_EXPR, type, arg);
3315 case CLEANUP_POINT_EXPR:
3316 return build1 (CLEANUP_POINT_EXPR, type,
3317 invert_truthvalue (TREE_OPERAND (arg, 0)));
3326 /* Return a simplified tree node for the truth-negation of ARG. This
3327 never alters ARG itself. We assume that ARG is an operation that
3328 returns a truth value (0 or 1).
3330 FIXME: one would think we would fold the result, but it causes
3331 problems with the dominator optimizer. */
3334 invert_truthvalue (tree arg)
3338 if (TREE_CODE (arg) == ERROR_MARK)
3341 tem = fold_truth_not_expr (arg);
3343 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3348 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3349 operands are another bit-wise operation with a common input. If so,
3350 distribute the bit operations to save an operation and possibly two if
3351 constants are involved. For example, convert
3352 (A | B) & (A | C) into A | (B & C)
3353 Further simplification will occur if B and C are constants.
3355 If this optimization cannot be done, 0 will be returned. */
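/* Editorial note: with constants involved, (X | 1) & (X | 2) is rewritten
   as X | (1 & 2), and the inner fold_build2 call below immediately reduces
   the constant part, so two of the three original operations disappear at
   once, as promised above.  */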
3358 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3363 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3364 || TREE_CODE (arg0) == code
3365 || (TREE_CODE (arg0) != BIT_AND_EXPR
3366 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3369 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3371 common = TREE_OPERAND (arg0, 0);
3372 left = TREE_OPERAND (arg0, 1);
3373 right = TREE_OPERAND (arg1, 1);
3375 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3377 common = TREE_OPERAND (arg0, 0);
3378 left = TREE_OPERAND (arg0, 1);
3379 right = TREE_OPERAND (arg1, 0);
3381 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3383 common = TREE_OPERAND (arg0, 1);
3384 left = TREE_OPERAND (arg0, 0);
3385 right = TREE_OPERAND (arg1, 1);
3387 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3389 common = TREE_OPERAND (arg0, 1);
3390 left = TREE_OPERAND (arg0, 0);
3391 right = TREE_OPERAND (arg1, 0);
3396 return fold_build2 (TREE_CODE (arg0), type, common,
3397 fold_build2 (code, type, left, right));
3400 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3401 with code CODE. This optimization is unsafe. */
3403 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3405 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3406 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3408 /* (A / C) +- (B / C) -> (A +- B) / C. */
3410 && operand_equal_p (TREE_OPERAND (arg0, 1),
3411 TREE_OPERAND (arg1, 1), 0))
3412 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3413 fold_build2 (code, type,
3414 TREE_OPERAND (arg0, 0),
3415 TREE_OPERAND (arg1, 0)),
3416 TREE_OPERAND (arg0, 1));
3418 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3419 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3420 TREE_OPERAND (arg1, 0), 0)
3421 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3422 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3424 REAL_VALUE_TYPE r0, r1;
3425 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3426 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3428 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3430 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3431 real_arithmetic (&r0, code, &r0, &r1);
3432 return fold_build2 (MULT_EXPR, type,
3433 TREE_OPERAND (arg0, 0),
3434 build_real (type, r0));
3440 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3441 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3444 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3451 tree size = TYPE_SIZE (TREE_TYPE (inner));
3452 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3453 || POINTER_TYPE_P (TREE_TYPE (inner)))
3454 && host_integerp (size, 0)
3455 && tree_low_cst (size, 0) == bitsize)
3456 return fold_convert (type, inner);
3459 result = build3 (BIT_FIELD_REF, type, inner,
3460 size_int (bitsize), bitsize_int (bitpos));
3462 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3467 /* Optimize a bit-field compare.
3469 There are two cases: First is a compare against a constant and the
3470 second is a comparison of two items where the fields are at the same
3471 bit position relative to the start of a chunk (byte, halfword, word)
3472 large enough to contain it. In these cases we can avoid the shift
3473 implicit in bitfield extractions.
3475 For constants, we emit a compare of the shifted constant with the
3476 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3477 compared. For two fields at the same position, we do the ANDs with the
3478 similar mask and compare the result of the ANDs.
3480 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3481 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3482 are the left and right operands of the comparison, respectively.
3484 If the optimization described above can be done, we return the resulting
3485 tree. Otherwise we return zero. */
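/* Editorial note: as a concrete (hypothetical) case, for
   struct { unsigned f : 3; } s, the test s.f == 5 can be rewritten as
   (word & mask) == (5 << shift), where word is a mode-sized load covering
   the field; the code below computes exactly that mask and shifted
   constant, avoiding the extract-and-shift of a bitfield load.  */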
3488 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3491 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3492 tree type = TREE_TYPE (lhs);
3493 tree signed_type, unsigned_type;
3494 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3495 enum machine_mode lmode, rmode, nmode;
3496 int lunsignedp, runsignedp;
3497 int lvolatilep = 0, rvolatilep = 0;
3498 tree linner, rinner = NULL_TREE;
3502 /* Get all the information about the extractions being done. If the bit size
3503 is the same as the size of the underlying object, we aren't doing an
3504 extraction at all and so can do nothing. We also don't want to
3505 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3506 then will no longer be able to replace it. */
3507 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3508 &lunsignedp, &lvolatilep, false);
3509 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3510 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3515 /* If this is not a constant, we can only do something if bit positions,
3516 sizes, and signedness are the same. */
3517 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3518 &runsignedp, &rvolatilep, false);
3520 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3521 || lunsignedp != runsignedp || offset != 0
3522 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3526 /* See if we can find a mode to refer to this field. We should be able to,
3527 but fail if we can't. */
3528 nmode = get_best_mode (lbitsize, lbitpos,
3529 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3530 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3531 TYPE_ALIGN (TREE_TYPE (rinner))),
3532 word_mode, lvolatilep || rvolatilep);
3533 if (nmode == VOIDmode)
3536 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
3538 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3539 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3541 /* Compute the bit position and size for the new reference and our offset
3542 within it. If the new reference is the same size as the original, we
3543 won't optimize anything, so return zero. */
3544 nbitsize = GET_MODE_BITSIZE (nmode);
3545 nbitpos = lbitpos & ~ (nbitsize - 1);
3547 if (nbitsize == lbitsize)
3550 if (BYTES_BIG_ENDIAN)
3551 lbitpos = nbitsize - lbitsize - lbitpos;
3553 /* Make the mask to be used against the extracted field. */
3554 mask = build_int_cst (unsigned_type, -1);
3555 mask = force_fit_type (mask, 0, false, false);
3556 mask = fold_convert (unsigned_type, mask);
3557 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3558 mask = const_binop (RSHIFT_EXPR, mask,
3559 size_int (nbitsize - lbitsize - lbitpos), 0);
3562 /* If not comparing with constant, just rework the comparison and return. */
3564 return build2 (code, compare_type,
3565 build2 (BIT_AND_EXPR, unsigned_type,
3566 make_bit_field_ref (linner, unsigned_type,
3567 nbitsize, nbitpos, 1),
3569 build2 (BIT_AND_EXPR, unsigned_type,
3570 make_bit_field_ref (rinner, unsigned_type,
3571 nbitsize, nbitpos, 1),
3574 /* Otherwise, we are handling the constant case. See if the constant is too
3575 big for the field. Warn and return a tree for 0 (false) if so. We do
3576 this not only for its own sake, but to avoid having to test for this
3577 error case below. If we didn't, we might generate wrong code.
3579 For unsigned fields, the constant shifted right by the field length should
3580 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3585 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3586 fold_convert (unsigned_type, rhs),
3587 size_int (lbitsize), 0)))
3589 warning (0, "comparison is always %d due to width of bit-field",
3591 return constant_boolean_node (code == NE_EXPR, compare_type);
3596 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3597 size_int (lbitsize - 1), 0);
3598 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3600 warning (0, "comparison is always %d due to width of bit-field",
3602 return constant_boolean_node (code == NE_EXPR, compare_type);
3606 /* Single-bit compares should always be against zero. */
3607 if (lbitsize == 1 && ! integer_zerop (rhs))
3609 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3610 rhs = build_int_cst (type, 0);
3613 /* Make a new bitfield reference, shift the constant over the
3614 appropriate number of bits and mask it with the computed mask
3615 (in case this was a signed field). If we changed it, make a new one. */
3616 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3619 TREE_SIDE_EFFECTS (lhs) = 1;
3620 TREE_THIS_VOLATILE (lhs) = 1;
3623 rhs = const_binop (BIT_AND_EXPR,
3624 const_binop (LSHIFT_EXPR,
3625 fold_convert (unsigned_type, rhs),
3626 size_int (lbitpos), 0),
3629 return build2 (code, compare_type,
3630 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3634 /* Subroutine for fold_truthop: decode a field reference.
3636 If EXP is a comparison reference, we return the innermost reference.
3638 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3639 set to the starting bit number.
3641 If the innermost field can be completely contained in a mode-sized
3642 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3644 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3645 otherwise it is not changed.
3647 *PUNSIGNEDP is set to the signedness of the field.
3649 *PMASK is set to the mask used. This is either contained in a
3650 BIT_AND_EXPR or derived from the width of the field.
3652 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3654 Return 0 if this is not a component reference or is one that we can't
3655 do anything with. */
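/* Editorial note: for instance, if EXP is (x & 0xf0) the BIT_AND_EXPR
   case below strips the mask into AND_MASK and returns the inner
   reference for x, so *PMASK ends up as 0xf0 merged with the mask
   implied by the width of the field.  */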
3658 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3659 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3660 int *punsignedp, int *pvolatilep,
3661 tree *pmask, tree *pand_mask)
3663 tree outer_type = 0;
3665 tree mask, inner, offset;
3667 unsigned int precision;
3669 /* All the optimizations using this function assume integer fields.
3670 There are problems with FP fields since the type_for_size call
3671 below can fail for, e.g., XFmode. */
3672 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3675 /* We are interested in the bare arrangement of bits, so strip everything
3676 that doesn't affect the machine mode. However, record the type of the
3677 outermost expression if it may matter below. */
3678 if (TREE_CODE (exp) == NOP_EXPR
3679 || TREE_CODE (exp) == CONVERT_EXPR
3680 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3681 outer_type = TREE_TYPE (exp);
3684 if (TREE_CODE (exp) == BIT_AND_EXPR)
3686 and_mask = TREE_OPERAND (exp, 1);
3687 exp = TREE_OPERAND (exp, 0);
3688 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3689 if (TREE_CODE (and_mask) != INTEGER_CST)
3693 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3694 punsignedp, pvolatilep, false);
3695 if ((inner == exp && and_mask == 0)
3696 || *pbitsize < 0 || offset != 0
3697 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3700 /* If the number of bits in the reference is the same as the bitsize of
3701 the outer type, then the outer type gives the signedness. Otherwise
3702 (in case of a small bitfield) the signedness is unchanged. */
3703 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3704 *punsignedp = TYPE_UNSIGNED (outer_type);
3706 /* Compute the mask to access the bitfield. */
3707 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3708 precision = TYPE_PRECISION (unsigned_type);
3710 mask = build_int_cst (unsigned_type, -1);
3711 mask = force_fit_type (mask, 0, false, false);
3713 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3714 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3716 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3718 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3719 fold_convert (unsigned_type, and_mask), mask);
3722 *pand_mask = and_mask;
3726 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits. */
3730 all_ones_mask_p (tree mask, int size)
3732 tree type = TREE_TYPE (mask);
3733 unsigned int precision = TYPE_PRECISION (type);
3736 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3737 tmask = force_fit_type (tmask, 0, false, false);
3740 tree_int_cst_equal (mask,
3741 const_binop (RSHIFT_EXPR,
3742 const_binop (LSHIFT_EXPR, tmask,
3743 size_int (precision - size),
3745 size_int (precision - size), 0));
3748 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3749 represents the sign bit of EXP's type. If EXP represents a sign
3750 or zero extension, also test VAL against the unextended type.
3751 The return value is the (sub)expression whose sign bit is VAL,
3752 or NULL_TREE otherwise. */
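/* Editorial note: for a 16-bit type this returns EXP only when VAL is
   0x8000; when EXP is a widening NOP_EXPR of an 8-bit operand, VAL ==
   0x80 is also accepted by the recursion on the unextended type.  */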
3755 sign_bit_p (tree exp, tree val)
3757 unsigned HOST_WIDE_INT mask_lo, lo;
3758 HOST_WIDE_INT mask_hi, hi;
3762 /* Tree EXP must have an integral type. */
3763 t = TREE_TYPE (exp);
3764 if (! INTEGRAL_TYPE_P (t))
3767 /* Tree VAL must be an integer constant. */
3768 if (TREE_CODE (val) != INTEGER_CST
3769 || TREE_CONSTANT_OVERFLOW (val))
3772 width = TYPE_PRECISION (t);
3773 if (width > HOST_BITS_PER_WIDE_INT)
3775 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3778 mask_hi = ((unsigned HOST_WIDE_INT) -1
3779 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3785 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3788 mask_lo = ((unsigned HOST_WIDE_INT) -1
3789 >> (HOST_BITS_PER_WIDE_INT - width));
3792 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3793 treat VAL as if it were unsigned. */
3794 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3795 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3798 /* Handle extension from a narrower type. */
3799 if (TREE_CODE (exp) == NOP_EXPR
3800 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3801 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3806 /* Subroutine for fold_truthop: determine if an operand is simple enough
3807 to be evaluated unconditionally. */
3810 simple_operand_p (tree exp)
3812 /* Strip any conversions that don't change the machine mode. */
3815 return (CONSTANT_CLASS_P (exp)
3816 || TREE_CODE (exp) == SSA_NAME
3818 && ! TREE_ADDRESSABLE (exp)
3819 && ! TREE_THIS_VOLATILE (exp)
3820 && ! DECL_NONLOCAL (exp)
3821 /* Don't regard global variables as simple. They may be
3822 allocated in ways unknown to the compiler (shared memory,
3823 #pragma weak, etc). */
3824 && ! TREE_PUBLIC (exp)
3825 && ! DECL_EXTERNAL (exp)
3826 /* Loading a static variable is unduly expensive, but global
3827 registers aren't expensive. */
3828 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3831 /* The following functions are subroutines to fold_range_test and allow it to
3832 try to change a logical combination of comparisons into a range test.
3835 For example, X == 2 || X == 3 || X == 4 || X == 5 is converted to
3839 (unsigned) (X - 2) <= 3
3841 We describe each set of comparisons as being either inside or outside
3842 a range, using a variable named like IN_P, and then describe the
3843 range with a lower and upper bound. If one of the bounds is omitted,
3844 it represents either the highest or lowest value of the type.
3846 In the comments below, we represent a range by two numbers in brackets
3847 preceded by a "+" to designate being inside that range, or a "-" to
3848 designate being outside that range, so the condition can be inverted by
3849 flipping the prefix. An omitted bound is represented by a "-". For
3850 example, "- [-, 10]" means being outside the range starting at the lowest
3851 possible value and ending at 10, in other words, being greater than 10.
3852 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3855 We set up things so that the missing bounds are handled in a consistent
3856 manner so neither a missing bound nor "true" and "false" need to be
3857 handled using a special case. */
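/* Editorial walk-through of the example above: each X == c is the range
   "+ [c, c]".  fold_range_test handles the ORs by inverting both sides,
   merging the adjacent exclusions (e.g. "- [2, 2]" and "- [3, 3]" merge
   into "- [2, 3]"), and inverting the result back, eventually producing
   "+ [2, 5]"; build_range_check then lowers that to
   (unsigned) (X - 2) <= 3.  */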
3859 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3860 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3861 and UPPER1_P are nonzero if the respective argument is an upper bound
3862 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3863 must be specified for a comparison. ARG1 will be converted to ARG0's
3864 type if both are specified. */
3867 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3868 tree arg1, int upper1_p)
3874 /* If neither arg represents infinity, do the normal operation.
3875 Else, if not a comparison, return infinity. Else handle the special
3876 comparison rules. Note that most of the cases below won't occur, but
3877 are handled for consistency. */
3879 if (arg0 != 0 && arg1 != 0)
3881 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3882 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3884 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3887 if (TREE_CODE_CLASS (code) != tcc_comparison)
3890 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3891 for neither. In real maths, we cannot assume open ended ranges are
3892 the same. But, this is computer arithmetic, where numbers are finite.
3893 We can therefore make the transformation of any unbounded range with
3894 the value Z, Z being greater than any representable number. This permits
3895 us to treat unbounded ranges as equal. */
3896 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3897 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3901 result = sgn0 == sgn1;
3904 result = sgn0 != sgn1;
3907 result = sgn0 < sgn1;
3910 result = sgn0 <= sgn1;
3913 result = sgn0 > sgn1;
3916 result = sgn0 >= sgn1;
3922 return constant_boolean_node (result, type);
3925 /* Given EXP, a logical expression, set the range it is testing into
3926 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3927 actually being tested. *PLOW and *PHIGH will be made of the same
3928 type as the returned expression. If EXP is not a comparison, we
3929 will most likely not be returning a useful value and range. Set
3930 *STRICT_OVERFLOW_P to true if the return value is only valid
3931 because signed overflow is undefined; otherwise, do not change
3932 *STRICT_OVERFLOW_P. */
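/* Editorial note: for EXP = (X > 10) with X an int, this returns X and
   describes the condition as "- [-, 10]" in the notation above, i.e.
   *PIN_P == 0, *PLOW null (unbounded below) and *PHIGH == 10.  */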
3935 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3936 bool *strict_overflow_p)
3938 enum tree_code code;
3939 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3940 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3942 tree low, high, n_low, n_high;
3944 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3945 and see if we can refine the range. Some of the cases below may not
3946 happen, but it doesn't seem worth worrying about this. We "continue"
3947 the outer loop when we've changed something; otherwise we "break"
3948 the switch, which will "break" the while. */
3951 low = high = build_int_cst (TREE_TYPE (exp), 0);
3955 code = TREE_CODE (exp);
3956 exp_type = TREE_TYPE (exp);
3958 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3960 if (TREE_CODE_LENGTH (code) > 0)
3961 arg0 = TREE_OPERAND (exp, 0);
3962 if (TREE_CODE_CLASS (code) == tcc_comparison
3963 || TREE_CODE_CLASS (code) == tcc_unary
3964 || TREE_CODE_CLASS (code) == tcc_binary)
3965 arg0_type = TREE_TYPE (arg0);
3966 if (TREE_CODE_CLASS (code) == tcc_binary
3967 || TREE_CODE_CLASS (code) == tcc_comparison
3968 || (TREE_CODE_CLASS (code) == tcc_expression
3969 && TREE_CODE_LENGTH (code) > 1))
3970 arg1 = TREE_OPERAND (exp, 1);
3975 case TRUTH_NOT_EXPR:
3976 in_p = ! in_p, exp = arg0;
3979 case EQ_EXPR: case NE_EXPR:
3980 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3981 /* We can only do something if the range is testing for zero
3982 and if the second operand is an integer constant. Note that
3983 saying something is "in" the range we make is done by
3984 complementing IN_P since it will set in the initial case of
3985 being not equal to zero; "out" is leaving it alone. */
3986 if (low == 0 || high == 0
3987 || ! integer_zerop (low) || ! integer_zerop (high)
3988 || TREE_CODE (arg1) != INTEGER_CST)
3993 case NE_EXPR: /* - [c, c] */
3996 case EQ_EXPR: /* + [c, c] */
3997 in_p = ! in_p, low = high = arg1;
3999 case GT_EXPR: /* - [-, c] */
4000 low = 0, high = arg1;
4002 case GE_EXPR: /* + [c, -] */
4003 in_p = ! in_p, low = arg1, high = 0;
4005 case LT_EXPR: /* - [c, -] */
4006 low = arg1, high = 0;
4008 case LE_EXPR: /* + [-, c] */
4009 in_p = ! in_p, low = 0, high = arg1;
4015 /* If this is an unsigned comparison, we also know that EXP is
4016 greater than or equal to zero. We base the range tests we make
4017 on that fact, so we record it here so we can parse existing
4018 range tests. We test arg0_type since often the return type
4019 of, e.g. EQ_EXPR, is boolean. */
4020 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4022 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4024 build_int_cst (arg0_type, 0),
4028 in_p = n_in_p, low = n_low, high = n_high;
4030 /* If the high bound is missing, but we have a nonzero low
4031 bound, reverse the range so it goes from zero to the low bound minus 1. */
4033 if (high == 0 && low && ! integer_zerop (low))
4036 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4037 integer_one_node, 0);
4038 low = build_int_cst (arg0_type, 0);
4046 /* (-x) IN [a,b] -> x in [-b, -a] */
4047 n_low = range_binop (MINUS_EXPR, exp_type,
4048 build_int_cst (exp_type, 0),
4050 n_high = range_binop (MINUS_EXPR, exp_type,
4051 build_int_cst (exp_type, 0),
4053 low = n_low, high = n_high;
4059 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4060 build_int_cst (exp_type, 1));
4063 case PLUS_EXPR: case MINUS_EXPR:
4064 if (TREE_CODE (arg1) != INTEGER_CST)
4067 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4068 move a constant to the other side. */
4069 if (!TYPE_UNSIGNED (arg0_type)
4070 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4073 /* If EXP is signed, any overflow in the computation is undefined,
4074 so we don't worry about it so long as our computations on
4075 the bounds don't overflow. For unsigned, overflow is defined
4076 and this is exactly the right thing. */
4077 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4078 arg0_type, low, 0, arg1, 0);
4079 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4080 arg0_type, high, 1, arg1, 0);
4081 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4082 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4085 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4086 *strict_overflow_p = true;
4088 /* Check for an unsigned range which has wrapped around the maximum
4089 value thus making n_high < n_low, and normalize it. */
4090 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4092 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4093 integer_one_node, 0);
4094 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4095 integer_one_node, 0);
4097 /* If the range is of the form +/- [ x+1, x ], we won't
4098 be able to normalize it. But then, it represents the
4099 whole range or the empty set, so make it +/- [ -, - ]. */
4101 if (tree_int_cst_equal (n_low, low)
4102 && tree_int_cst_equal (n_high, high))
4108 low = n_low, high = n_high;
4113 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4114 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4117 if (! INTEGRAL_TYPE_P (arg0_type)
4118 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4119 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4122 n_low = low, n_high = high;
4125 n_low = fold_convert (arg0_type, n_low);
4128 n_high = fold_convert (arg0_type, n_high);
4131 /* If we're converting arg0 from an unsigned type, to exp,
4132 a signed type, we will be doing the comparison as unsigned.
4133 The tests above have already verified that LOW and HIGH are both positive.
4136 So we have to ensure that we will handle large unsigned
4137 values the same way that the current signed bounds treat negative values. */
4140 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4143 tree equiv_type = lang_hooks.types.type_for_mode
4144 (TYPE_MODE (arg0_type), 1);
4146 /* A range without an upper bound is, naturally, unbounded.
4147 Since convert would have cropped a very large value, use
4148 the max value for the destination type. */
4150 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4151 : TYPE_MAX_VALUE (arg0_type);
4153 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4154 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4155 fold_convert (arg0_type,
4157 fold_convert (arg0_type,
4160 /* If the low bound is specified, "and" the range with the
4161 range for which the original unsigned value will be positive. */
4165 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4166 1, n_low, n_high, 1,
4167 fold_convert (arg0_type,
4172 in_p = (n_in_p == in_p);
4176 /* Otherwise, "or" the range with the range of the input
4177 that will be interpreted as negative. */
4178 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4179 0, n_low, n_high, 1,
4180 fold_convert (arg0_type,
4185 in_p = (in_p != n_in_p);
4190 low = n_low, high = n_high;
4200 /* If EXP is a constant, we can evaluate whether this is true or false. */
4201 if (TREE_CODE (exp) == INTEGER_CST)
4203 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4205 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4211 *pin_p = in_p, *plow = low, *phigh = high;
4215 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4216 type, TYPE, return an expression to test if EXP is in (or out of, depending
4217 on IN_P) the range. Return 0 if the test couldn't be created. */
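/* Editorial note: some shapes this produces.  With low == high == c the
   result is EXP == c; a zero low bound becomes the single comparison
   (unsigned) EXP <= high; and a general [low, high] is reduced further
   down to (unsigned) (EXP - low) <= high - low.  */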
4220 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4222 tree etype = TREE_TYPE (exp);
4225 #ifdef HAVE_canonicalize_funcptr_for_compare
4226 /* Disable this optimization for function pointer expressions
4227 on targets that require function pointer canonicalization. */
4228 if (HAVE_canonicalize_funcptr_for_compare
4229 && TREE_CODE (etype) == POINTER_TYPE
4230 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4236 value = build_range_check (type, exp, 1, low, high);
4238 return invert_truthvalue (value);
4243 if (low == 0 && high == 0)
4244 return build_int_cst (type, 1);
4247 return fold_build2 (LE_EXPR, type, exp,
4248 fold_convert (etype, high));
4251 return fold_build2 (GE_EXPR, type, exp,
4252 fold_convert (etype, low));
4254 if (operand_equal_p (low, high, 0))
4255 return fold_build2 (EQ_EXPR, type, exp,
4256 fold_convert (etype, low));
4258 if (integer_zerop (low))
4260 if (! TYPE_UNSIGNED (etype))
4262 etype = lang_hooks.types.unsigned_type (etype);
4263 high = fold_convert (etype, high);
4264 exp = fold_convert (etype, exp);
4266 return build_range_check (type, exp, 1, 0, high);
4269 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4270 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4272 unsigned HOST_WIDE_INT lo;
4276 prec = TYPE_PRECISION (etype);
4277 if (prec <= HOST_BITS_PER_WIDE_INT)
4280 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4284 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4285 lo = (unsigned HOST_WIDE_INT) -1;
4288 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4290 if (TYPE_UNSIGNED (etype))
4292 etype = lang_hooks.types.signed_type (etype);
4293 exp = fold_convert (etype, exp);
4295 return fold_build2 (GT_EXPR, type, exp,
4296 build_int_cst (etype, 0));
4300 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4301 This requires wrap-around arithmetic for the type of the expression. */
4302 switch (TREE_CODE (etype))
4305 /* There is no requirement that LOW be within the range of ETYPE
4306 if the latter is a subtype. It must, however, be within the base
4307 type of ETYPE. So be sure we do the subtraction in that type. */
4308 if (TREE_TYPE (etype))
4309 etype = TREE_TYPE (etype);
4314 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4315 TYPE_UNSIGNED (etype));
4322 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4323 if (TREE_CODE (etype) == INTEGER_TYPE
4324 && !TYPE_OVERFLOW_WRAPS (etype))
4326 tree utype, minv, maxv;
4328 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4329 for the type in question, as we rely on this here. */
4330 utype = lang_hooks.types.unsigned_type (etype);
4331 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4332 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4333 integer_one_node, 1);
4334 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4336 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4343 high = fold_convert (etype, high);
4344 low = fold_convert (etype, low);
4345 exp = fold_convert (etype, exp);
4347 value = const_binop (MINUS_EXPR, high, low, 0);
4349 if (value != 0 && !TREE_OVERFLOW (value))
4350 return build_range_check (type,
4351 fold_build2 (MINUS_EXPR, etype, exp, low),
4352 1, build_int_cst (etype, 0), value);
4357 /* Return the predecessor of VAL in its type, handling the infinite case. */
4360 range_predecessor (tree val)
4362 tree type = TREE_TYPE (val);
4364 if (INTEGRAL_TYPE_P (type)
4365 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4368 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4371 /* Return the successor of VAL in its type, handling the infinite case. */
4374 range_successor (tree val)
4376 tree type = TREE_TYPE (val);
4378 if (INTEGRAL_TYPE_P (type)
4379 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4382 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4385 /* Given two ranges, see if we can merge them into one. Return 1 if we
4386 can, 0 if we can't. Set the output range into the specified parameters. */
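/* Editorial note: merge_ranges computes the AND of the two tests.  For
   example, the inclusions "+ [0, 10]" and "+ [5, 20]" intersect to
   "+ [5, 10]", while the adjacent exclusions "- [2, 3]" and "- [4, 5]"
   combine to "- [2, 5]".  */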
4389 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4390 tree high0, int in1_p, tree low1, tree high1)
4398 int lowequal = ((low0 == 0 && low1 == 0)
4399 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4400 low0, 0, low1, 0)));
4401 int highequal = ((high0 == 0 && high1 == 0)
4402 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4403 high0, 1, high1, 1)));
4405 /* Make range 0 be the range that starts first, or ends last if they
4406 start at the same value. Swap them if it isn't. */
4407 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4410 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4411 high1, 1, high0, 1))))
4413 temp = in0_p, in0_p = in1_p, in1_p = temp;
4414 tem = low0, low0 = low1, low1 = tem;
4415 tem = high0, high0 = high1, high1 = tem;
4418 /* Now flag two cases, whether the ranges are disjoint or whether the
4419 second range is totally subsumed in the first. Note that the tests
4420 below are simplified by the ones above. */
4421 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4422 high0, 1, low1, 0));
4423 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4424 high1, 1, high0, 1));
4426 /* We now have four cases, depending on whether we are including or
4427 excluding the two ranges. */
4430 /* If they don't overlap, the result is false. If the second range
4431 is a subset it is the result. Otherwise, the range is from the start
4432 of the second to the end of the first. */
4434 in_p = 0, low = high = 0;
4436 in_p = 1, low = low1, high = high1;
4438 in_p = 1, low = low1, high = high0;
4441 else if (in0_p && ! in1_p)
4443 /* If they don't overlap, the result is the first range. If they are
4444 equal, the result is false. If the second range is a subset of the
4445 first, and the ranges begin at the same place, we go from just after
4446 the end of the second range to the end of the first. If the second
4447 range is not a subset of the first, or if it is a subset and both
4448 ranges end at the same place, the range starts at the start of the
4449 first range and ends just before the second range.
4450 Otherwise, we can't describe this as a single range. */
4452 in_p = 1, low = low0, high = high0;
4453 else if (lowequal && highequal)
4454 in_p = 0, low = high = 0;
4455 else if (subset && lowequal)
4457 low = range_successor (high1);
4462 /* We are in the weird situation where high0 > high1 but
4463 high1 has no successor. Punt. */
4467 else if (! subset || highequal)
4470 high = range_predecessor (low1);
4474 /* low0 < low1 but low1 has no predecessor. Punt. */
4482 else if (! in0_p && in1_p)
4484 /* If they don't overlap, the result is the second range. If the second
4485 is a subset of the first, the result is false. Otherwise,
4486 the range starts just after the first range and ends at the
4487 end of the second. */
4489 in_p = 1, low = low1, high = high1;
4490 else if (subset || highequal)
4491 in_p = 0, low = high = 0;
4494 low = range_successor (high0);
4499 /* high1 > high0 but high0 has no successor. Punt. */
4507 /* The case where we are excluding both ranges. Here the complex case
4508 is if they don't overlap. In that case, the only time we have a
4509 range is if they are adjacent. If the second is a subset of the
4510 first, the result is the first. Otherwise, the range to exclude
4511 starts at the beginning of the first range and ends at the end of the second.  */
4515 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4516 range_successor (high0),
4518 in_p = 0, low = low0, high = high1;
4521 /* Canonicalize - [min, x] into - [-, x]. */
4522 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4523 switch (TREE_CODE (TREE_TYPE (low0)))
4526 if (TYPE_PRECISION (TREE_TYPE (low0))
4527 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4531 if (tree_int_cst_equal (low0,
4532 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4536 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4537 && integer_zerop (low0))
4544 /* Canonicalize - [x, max] into - [x, -]. */
4545 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4546 switch (TREE_CODE (TREE_TYPE (high1)))
4549 if (TYPE_PRECISION (TREE_TYPE (high1))
4550 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4554 if (tree_int_cst_equal (high1,
4555 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4559 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4560 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4562 integer_one_node, 1)))
4569 /* The ranges might also be adjacent between the maximum and
4570 minimum values of the given type. For
4571 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4572 return + [x + 1, y - 1]. */
4573 if (low0 == 0 && high1 == 0)
4575 low = range_successor (high0);
4576 high = range_predecessor (low1);
4577 if (low == 0 || high == 0)
4587 in_p = 0, low = low0, high = high0;
4589 in_p = 0, low = low0, high = high1;
4592 *pin_p = in_p, *plow = low, *phigh = high;
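/* Editorial illustration (not GCC code): the effect of merging two
   "in" ranges as above, checked by brute force on a small domain.  */
#if 0
#include <assert.h>

static void
check_range_merge (void)
{
  int x;
  /* [0, 10] intersected with [5, 20] is the single range [5, 10].  */
  for (x = -50; x <= 50; x++)
    assert (((x >= 0 && x <= 10) && (x >= 5 && x <= 20))
            == (x >= 5 && x <= 10));
}
#endif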
4597 /* Subroutine of fold, looking inside expressions of the form
4598 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4599 of the COND_EXPR. This function is also used to optimize
4600 A op B ? C : A, by reversing the comparison first.
4602 Return a folded expression whose code is not a COND_EXPR
4603 anymore, or NULL_TREE if no folding opportunity is found. */
4606 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4608 enum tree_code comp_code = TREE_CODE (arg0);
4609 tree arg00 = TREE_OPERAND (arg0, 0);
4610 tree arg01 = TREE_OPERAND (arg0, 1);
4611 tree arg1_type = TREE_TYPE (arg1);
4617 /* If we have A op 0 ? A : -A, consider applying the following transformations:
4620 A == 0? A : -A same as -A
4621 A != 0? A : -A same as A
4622 A >= 0? A : -A same as abs (A)
4623 A > 0? A : -A same as abs (A)
4624 A <= 0? A : -A same as -abs (A)
4625 A < 0? A : -A same as -abs (A)
4627 None of these transformations work for modes with signed
4628 zeros. If A is +/-0, the first two transformations will
4629 change the sign of the result (from +0 to -0, or vice
4630 versa). The last four will fix the sign of the result,
4631 even though the original expressions could be positive or
4632 negative, depending on the sign of A.
4634 Note that all these transformations are correct if A is
4635 NaN, since the two alternatives (A and -A) are also NaNs. */
4636 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4637 ? real_zerop (arg01)
4638 : integer_zerop (arg01))
4639 && ((TREE_CODE (arg2) == NEGATE_EXPR
4640 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4641 /* In the case that A is of the form X-Y, '-A' (arg2) may
4642 have already been folded to Y-X, check for that. */
4643 || (TREE_CODE (arg1) == MINUS_EXPR
4644 && TREE_CODE (arg2) == MINUS_EXPR
4645 && operand_equal_p (TREE_OPERAND (arg1, 0),
4646 TREE_OPERAND (arg2, 1), 0)
4647 && operand_equal_p (TREE_OPERAND (arg1, 1),
4648 TREE_OPERAND (arg2, 0), 0))))
4653 tem = fold_convert (arg1_type, arg1);
4654 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4657 return pedantic_non_lvalue (fold_convert (type, arg1));
4660 if (flag_trapping_math)
4665 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4666 arg1 = fold_convert (lang_hooks.types.signed_type
4667 (TREE_TYPE (arg1)), arg1);
4668 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4669 return pedantic_non_lvalue (fold_convert (type, tem));
4672 if (flag_trapping_math)
4676 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4677 arg1 = fold_convert (lang_hooks.types.signed_type
4678 (TREE_TYPE (arg1)), arg1);
4679 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4680 return negate_expr (fold_convert (type, tem));
4682 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4686 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4687 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4688 both transformations are correct when A is NaN: A != 0
4689 is then true, and A == 0 is false. */
4691 if (integer_zerop (arg01) && integer_zerop (arg2))
4693 if (comp_code == NE_EXPR)
4694 return pedantic_non_lvalue (fold_convert (type, arg1));
4695 else if (comp_code == EQ_EXPR)
4696 return build_int_cst (type, 0);
4699 /* Try some transformations of A op B ? A : B.
4701 A == B? A : B same as B
4702 A != B? A : B same as A
4703 A >= B? A : B same as max (A, B)
4704 A > B? A : B same as max (B, A)
4705 A <= B? A : B same as min (A, B)
4706 A < B? A : B same as min (B, A)
4708 As above, these transformations don't work in the presence
4709 of signed zeros. For example, if A and B are zeros of
4710 opposite sign, the first two transformations will change
4711 the sign of the result. In the last four, the original
4712 expressions give different results for (A=+0, B=-0) and
4713 (A=-0, B=+0), but the transformed expressions do not.
4715 The first two transformations are correct if either A or B
4716 is a NaN. In the first transformation, the condition will
4717 be false, and B will indeed be chosen. In the case of the
4718 second transformation, the condition A != B will be true,
4719 and A will be chosen.
4721 The conversions to max() and min() are not correct if B is
4722 a number and A is not. The conditions in the original
4723 expressions will be false, so all four give B. The min()
4724 and max() versions would give a NaN instead. */
4725 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4726 /* Avoid these transformations if the COND_EXPR may be used
4727 as an lvalue in the C++ front-end. PR c++/19199. */
4729 || (strcmp (lang_hooks.name, "GNU C++") != 0
4730 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4731 || ! maybe_lvalue_p (arg1)
4732 || ! maybe_lvalue_p (arg2)))
4734 tree comp_op0 = arg00;
4735 tree comp_op1 = arg01;
4736 tree comp_type = TREE_TYPE (comp_op0);
4738 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4739 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4749 return pedantic_non_lvalue (fold_convert (type, arg2));
4751 return pedantic_non_lvalue (fold_convert (type, arg1));
4756 /* In C++ a ?: expression can be an lvalue, so put the
4757 operand which will be used if they are equal first
4758 so that we can convert this back to the
4759 corresponding COND_EXPR. */
4760 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4762 comp_op0 = fold_convert (comp_type, comp_op0);
4763 comp_op1 = fold_convert (comp_type, comp_op1);
4764 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4765 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4766 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4767 return pedantic_non_lvalue (fold_convert (type, tem));
4774 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4776 comp_op0 = fold_convert (comp_type, comp_op0);
4777 comp_op1 = fold_convert (comp_type, comp_op1);
4778 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4779 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4780 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4781 return pedantic_non_lvalue (fold_convert (type, tem));
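/* Editorial illustration (not GCC code): the MIN/MAX rewrites above for
   integer operands, where the NaN and signed-zero caveats do not apply.
   Checked exhaustively on a small domain.  */
#if 0
#include <assert.h>

static void
check_cond_minmax (void)
{
  int a, b;
  for (a = -5; a <= 5; a++)
    for (b = -5; b <= 5; b++)
      {
        assert ((a <= b ? a : b) == (a < b ? a : b));   /* MIN (a, b) */
        assert ((a >= b ? a : b) == (a > b ? a : b));   /* MAX (a, b) */
      }
}
#endif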
4785 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4786 return pedantic_non_lvalue (fold_convert (type, arg2));
4789 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4790 return pedantic_non_lvalue (fold_convert (type, arg1));
4793 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4798 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4799 we might still be able to simplify this. For example,
4800 if C1 is one less or one more than C2, this might have started
4801 out as a MIN or MAX and been transformed by this function.
4802 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4804 if (INTEGRAL_TYPE_P (type)
4805 && TREE_CODE (arg01) == INTEGER_CST
4806 && TREE_CODE (arg2) == INTEGER_CST)
4810 /* We can replace A with C1 in this case. */
4811 arg1 = fold_convert (type, arg01);
4812 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4815 /* If C1 is C2 + 1, this is min(A, C2). */
4816 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4818 && operand_equal_p (arg01,
4819 const_binop (PLUS_EXPR, arg2,
4820 integer_one_node, 0),
4822 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4827 /* If C1 is C2 - 1, this is min(A, C2). */
4828 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4830 && operand_equal_p (arg01,
4831 const_binop (MINUS_EXPR, arg2,
4832 integer_one_node, 0),
4834 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4839 /* If C1 is C2 - 1, this is max(A, C2). */
4840 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4842 && operand_equal_p (arg01,
4843 const_binop (MINUS_EXPR, arg2,
4844 integer_one_node, 0),
4846 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4851 /* If C1 is C2 + 1, this is max(A, C2). */
4852 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4854 && operand_equal_p (arg01,
4855 const_binop (PLUS_EXPR, arg2,
4856 integer_one_node, 0),
4858 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4872 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4873 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
4876 /* OP0 and OP1 are the operands of a logical combination (CODE) of boolean
4877 tests. See if we can merge them into a range test. Return the new tree if so. */
4880 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4882 int or_op = (code == TRUTH_ORIF_EXPR
4883 || code == TRUTH_OR_EXPR);
4884 int in0_p, in1_p, in_p;
4885 tree low0, low1, low, high0, high1, high;
4886 bool strict_overflow_p = false;
4887 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4888 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4890 const char * const warnmsg = G_("assuming signed overflow does not occur "
4891 "when simplifying range test");
4893 /* If this is an OR operation, invert both sides; we will invert
4894 again at the end. */
4896 in0_p = ! in0_p, in1_p = ! in1_p;
4898 /* If both expressions are the same, if we can merge the ranges, and we
4899 can build the range test, return it or its inversion. If one of the
4900 ranges is always true or always false, consider it to be the same
4901 expression as the other. */
4902 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4903 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4905 && 0 != (tem = (build_range_check (type,
4907 : rhs != 0 ? rhs : integer_zero_node,
4910 if (strict_overflow_p)
4911 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4912 return or_op ? invert_truthvalue (tem) : tem;
4915 /* On machines where branches are expensive, if this is a
4916 short-circuited branch and the underlying object on both sides
4917 is the same, make a non-short-circuit operation. */
4918 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4919 && lhs != 0 && rhs != 0
4920 && (code == TRUTH_ANDIF_EXPR
4921 || code == TRUTH_ORIF_EXPR)
4922 && operand_equal_p (lhs, rhs, 0))
4924 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4925 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4926 which cases we can't do this. */
4927 if (simple_operand_p (lhs))
4928 return build2 (code == TRUTH_ANDIF_EXPR
4929 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4932 else if (lang_hooks.decls.global_bindings_p () == 0
4933 && ! CONTAINS_PLACEHOLDER_P (lhs))
4935 tree common = save_expr (lhs);
4937 if (0 != (lhs = build_range_check (type, common,
4938 or_op ? ! in0_p : in0_p,
4940 && (0 != (rhs = build_range_check (type, common,
4941 or_op ? ! in1_p : in1_p,
4944 if (strict_overflow_p)
4945 fold_overflow_warning (warnmsg,
4946 WARN_STRICT_OVERFLOW_COMPARISON);
4947 return build2 (code == TRUTH_ANDIF_EXPR
4948 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4957 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a
4958 P-bit value. Arrange things so the extra bits will be set to zero if and
4959 only if C is sign-extended to its full width. If MASK is nonzero,
4960 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4963 unextend (tree c, int p, int unsignedp, tree mask)
4965 tree type = TREE_TYPE (c);
4966 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4969 if (p == modesize || unsignedp)
4972 /* We work by getting just the sign bit into the low-order bit, then
4973 into the high-order bit, then sign-extend. We then XOR that value with C. */
4975 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4976 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4978 /* We must use a signed type in order to get an arithmetic right shift.
4979 However, we must also avoid introducing accidental overflows, so that
4980 a subsequent call to integer_zerop will work. Hence we must
4981 do the type conversion here. At this point, the constant is either
4982 zero or one, and the conversion to a signed type can never overflow.
4983 We could get an overflow if this conversion is done anywhere else. */
4984 if (TYPE_UNSIGNED (type))
4985 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4987 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4988 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4990 temp = const_binop (BIT_AND_EXPR, temp,
4991 fold_convert (TREE_TYPE (c), mask), 0);
4992 /* If necessary, convert the type back to match the type of C. */
4993 if (TYPE_UNSIGNED (type))
4994 temp = fold_convert (type, temp);
4996 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
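/* Editorial illustration (not GCC code): the shift dance above in
   miniature.  Sign-extending a P-bit value by pushing it to the top of
   the word and arithmetic-shifting it back assumes an arithmetic right
   shift on signed int (implementation-defined in C, but what this file
   relies on).  The helper name is hypothetical.  */
#if 0
static int
sign_extend (unsigned v, int p)
{
  int shift = (int) (8 * sizeof (int)) - p;
  /* Put the P-bit value in the high-order bits, then shift back down,
     replicating the sign bit on the way.  */
  return (int) (v << shift) >> shift;
}
#endif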
4999 /* Find ways of folding logical expressions of LHS and RHS:
5000 Try to merge two comparisons to the same innermost item.
5001 Look for range tests like "ch >= '0' && ch <= '9'".
5002 Look for combinations of simple terms on machines with expensive branches
5003 and evaluate the RHS unconditionally.
5005 For example, if we have p->a == 2 && p->b == 4 and we can make an
5006 object large enough to span both A and B, we can do this with a comparison
5007 against the object ANDed with a mask.
5009 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5010 operations to do this with one comparison.
5012 We check for both normal comparisons and the BIT_AND_EXPRs made by
5013 this function and the one above.
5015 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5016 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5018 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
5021 We return the simplified tree or 0 if no optimization is possible. */
5024 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5026 /* If this is the "or" of two comparisons, we can do something if
5027 the comparisons are NE_EXPR. If this is the "and", we can do something
5028 if the comparisons are EQ_EXPR. I.e.,
5029 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5031 WANTED_CODE is this operation code. For single bit fields, we can
5032 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5033 comparison for one-bit fields. */
5035 enum tree_code wanted_code;
5036 enum tree_code lcode, rcode;
5037 tree ll_arg, lr_arg, rl_arg, rr_arg;
5038 tree ll_inner, lr_inner, rl_inner, rr_inner;
5039 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5040 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5041 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5042 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5043 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5044 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5045 enum machine_mode lnmode, rnmode;
5046 tree ll_mask, lr_mask, rl_mask, rr_mask;
5047 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5048 tree l_const, r_const;
5049 tree lntype, rntype, result;
5050 int first_bit, end_bit;
5052 tree orig_lhs = lhs, orig_rhs = rhs;
5053 enum tree_code orig_code = code;
5055 /* Start by getting the comparison codes. Fail if anything is volatile.
5056 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5057 it were surrounded with a NE_EXPR. */
5059 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5062 lcode = TREE_CODE (lhs);
5063 rcode = TREE_CODE (rhs);
5065 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5067 lhs = build2 (NE_EXPR, truth_type, lhs,
5068 build_int_cst (TREE_TYPE (lhs), 0));
5072 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5074 rhs = build2 (NE_EXPR, truth_type, rhs,
5075 build_int_cst (TREE_TYPE (rhs), 0));
5079 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5080 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5083 ll_arg = TREE_OPERAND (lhs, 0);
5084 lr_arg = TREE_OPERAND (lhs, 1);
5085 rl_arg = TREE_OPERAND (rhs, 0);
5086 rr_arg = TREE_OPERAND (rhs, 1);
5088 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5089 if (simple_operand_p (ll_arg)
5090 && simple_operand_p (lr_arg))
5093 if (operand_equal_p (ll_arg, rl_arg, 0)
5094 && operand_equal_p (lr_arg, rr_arg, 0))
5096 result = combine_comparisons (code, lcode, rcode,
5097 truth_type, ll_arg, lr_arg);
5101 else if (operand_equal_p (ll_arg, rr_arg, 0)
5102 && operand_equal_p (lr_arg, rl_arg, 0))
5104 result = combine_comparisons (code, lcode,
5105 swap_tree_comparison (rcode),
5106 truth_type, ll_arg, lr_arg);
5112 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5113 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5115 /* If the RHS can be evaluated unconditionally and its operands are
5116 simple, it wins to evaluate the RHS unconditionally on machines
5117 with expensive branches. In this case, this isn't a comparison
5118 that can be merged. Avoid doing this if the RHS is a floating-point
5119 comparison since those can trap. */
5121 if (BRANCH_COST >= 2
5122 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5123 && simple_operand_p (rl_arg)
5124 && simple_operand_p (rr_arg))
5126 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5127 if (code == TRUTH_OR_EXPR
5128 && lcode == NE_EXPR && integer_zerop (lr_arg)
5129 && rcode == NE_EXPR && integer_zerop (rr_arg)
5130 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5131 return build2 (NE_EXPR, truth_type,
5132 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5134 build_int_cst (TREE_TYPE (ll_arg), 0));
5136 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5137 if (code == TRUTH_AND_EXPR
5138 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5139 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5140 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5141 return build2 (EQ_EXPR, truth_type,
5142 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5144 build_int_cst (TREE_TYPE (ll_arg), 0));
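/* Editorial illustration (not GCC code): the branch-free rewrite just
   built.  Both operands are evaluated, but no conditional jump is
   needed.  */
#if 0
static int
both_zero (unsigned a, unsigned b)
{
  /* (a == 0 && b == 0)  becomes  (a | b) == 0; dually,
     (a != 0 || b != 0)  becomes  (a | b) != 0.  */
  return (a | b) == 0;
}
#endif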
5146 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5148 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5149 return build2 (code, truth_type, lhs, rhs);
5154 /* See if the comparisons can be merged. Then get all the parameters for each side.  */
5157 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5158 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5162 ll_inner = decode_field_reference (ll_arg,
5163 &ll_bitsize, &ll_bitpos, &ll_mode,
5164 &ll_unsignedp, &volatilep, &ll_mask,
5166 lr_inner = decode_field_reference (lr_arg,
5167 &lr_bitsize, &lr_bitpos, &lr_mode,
5168 &lr_unsignedp, &volatilep, &lr_mask,
5170 rl_inner = decode_field_reference (rl_arg,
5171 &rl_bitsize, &rl_bitpos, &rl_mode,
5172 &rl_unsignedp, &volatilep, &rl_mask,
5174 rr_inner = decode_field_reference (rr_arg,
5175 &rr_bitsize, &rr_bitpos, &rr_mode,
5176 &rr_unsignedp, &volatilep, &rr_mask,
5179 /* The inner operation on the lhs of each comparison must be the
5180 same if we are to be able to do anything. Then see if we have
5181 constants. If not, the same must be true for the rhs's.  */
5183 if (volatilep || ll_inner == 0 || rl_inner == 0
5184 || ! operand_equal_p (ll_inner, rl_inner, 0))
5187 if (TREE_CODE (lr_arg) == INTEGER_CST
5188 && TREE_CODE (rr_arg) == INTEGER_CST)
5189 l_const = lr_arg, r_const = rr_arg;
5190 else if (lr_inner == 0 || rr_inner == 0
5191 || ! operand_equal_p (lr_inner, rr_inner, 0))
5194 l_const = r_const = 0;
5196 /* If either comparison code is not correct for our logical operation,
5197 fail. However, we can convert a one-bit comparison against zero into
5198 the opposite comparison against that bit being set in the field. */
5200 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5201 if (lcode != wanted_code)
5203 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5205 /* Make the left operand unsigned, since we are only interested
5206 in the value of one bit. Otherwise we are doing the wrong thing below.  */
5215 /* This is analogous to the code for l_const above. */
5216 if (rcode != wanted_code)
5218 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5227 /* After this point all optimizations will generate bit-field
5228 references, which we might not want. */
5229 if (! lang_hooks.can_use_bit_fields_p ())
5232 /* See if we can find a mode that contains both fields being compared on
5233 the left. If we can't, fail. Otherwise, update all constants and masks
5234 to be relative to a field of that size. */
5235 first_bit = MIN (ll_bitpos, rl_bitpos);
5236 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5237 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5238 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5240 if (lnmode == VOIDmode)
5243 lnbitsize = GET_MODE_BITSIZE (lnmode);
5244 lnbitpos = first_bit & ~ (lnbitsize - 1);
5245 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5246 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5248 if (BYTES_BIG_ENDIAN)
5250 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5251 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5254 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5255 size_int (xll_bitpos), 0);
5256 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5257 size_int (xrl_bitpos), 0);
5261 l_const = fold_convert (lntype, l_const);
5262 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5263 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5264 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5265 fold_build1 (BIT_NOT_EXPR,
5269 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5271 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5276 r_const = fold_convert (lntype, r_const);
5277 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5278 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5279 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5280 fold_build1 (BIT_NOT_EXPR,
5284 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5286 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5290 /* If the right sides are not constant, do the same for them. Also,
5291 disallow this optimization if a size or signedness mismatch occurs
5292 between the left and right sides. */
5295 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5296 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5297 /* Make sure the two fields on the right
5298 correspond to the left without being swapped. */
5299 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5302 first_bit = MIN (lr_bitpos, rr_bitpos);
5303 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5304 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5305 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5307 if (rnmode == VOIDmode)
5310 rnbitsize = GET_MODE_BITSIZE (rnmode);
5311 rnbitpos = first_bit & ~ (rnbitsize - 1);
5312 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5313 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5315 if (BYTES_BIG_ENDIAN)
5317 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5318 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5321 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5322 size_int (xlr_bitpos), 0);
5323 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5324 size_int (xrr_bitpos), 0);
5326 /* Make a mask that corresponds to both fields being compared.
5327 Do this for both items being compared. If the operands are the
5328 same size and the bits being compared are in the same position
5329 then we can do this by masking both and comparing the masked results.  */
5331 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5332 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5333 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5335 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5336 ll_unsignedp || rl_unsignedp);
5337 if (! all_ones_mask_p (ll_mask, lnbitsize))
5338 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5340 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5341 lr_unsignedp || rr_unsignedp);
5342 if (! all_ones_mask_p (lr_mask, rnbitsize))
5343 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5345 return build2 (wanted_code, truth_type, lhs, rhs);
5348 /* There is still another way we can do something: If both pairs of
5349 fields being compared are adjacent, we may be able to make a wider
5350 field containing them both.
5352 Note that we still must mask the lhs/rhs expressions. Furthermore,
5353 the mask must be shifted to account for the shift done by
5354 make_bit_field_ref. */
5355 if ((ll_bitsize + ll_bitpos == rl_bitpos
5356 && lr_bitsize + lr_bitpos == rr_bitpos)
5357 || (ll_bitpos == rl_bitpos + rl_bitsize
5358 && lr_bitpos == rr_bitpos + rr_bitsize))
5362 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5363 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5364 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5365 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5367 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5368 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5369 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5370 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5372 /* Convert to the smaller type before masking out unwanted bits. */
5374 if (lntype != rntype)
5376 if (lnbitsize > rnbitsize)
5378 lhs = fold_convert (rntype, lhs);
5379 ll_mask = fold_convert (rntype, ll_mask);
5382 else if (lnbitsize < rnbitsize)
5384 rhs = fold_convert (lntype, rhs);
5385 lr_mask = fold_convert (lntype, lr_mask);
5390 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5391 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5393 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5394 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5396 return build2 (wanted_code, truth_type, lhs, rhs);
5402 /* Handle the case of comparisons with constants. If there is something in
5403 common between the masks, those bits of the constants must be the same.
5404 If not, the condition is always false. Test for this to avoid generating
5405 incorrect code below. */
5406 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5407 if (! integer_zerop (result)
5408 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5409 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5411 if (wanted_code == NE_EXPR)
5413 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5414 return constant_boolean_node (true, truth_type);
5418 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5419 return constant_boolean_node (false, truth_type);
5423 /* Construct the expression we will return. First get the component
5424 reference we will make. Unless the mask is all ones the width of
5425 that field, perform the mask operation. Then compare with the merged constant.  */
5427 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5428 ll_unsignedp || rl_unsignedp);
5430 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5431 if (! all_ones_mask_p (ll_mask, lnbitsize))
5432 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5434 return build2 (wanted_code, truth_type, result,
5435 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
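/* Editorial illustration (not GCC code): the flavor of merge performed
   by fold_truthop.  Two adjacent byte-sized fields can be tested with
   one wider load and one comparison.  The memcpy form builds the
   expected word the same way, so it is endian-safe; it assumes a 2-byte
   unsigned short and the (guaranteed) absence of padding between the
   two members.  */
#if 0
#include <string.h>

struct two_bytes { unsigned char a, b; };

static int
both_match (const struct two_bytes *p)
{
  const struct two_bytes want = { 2, 4 };   /* p->a == 2 && p->b == 4 */
  unsigned short w, k;
  memcpy (&w, p, sizeof w);
  memcpy (&k, &want, sizeof k);
  return w == k;                            /* one load, one compare */
}
#endif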
5438 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant.  */
5442 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5445 enum tree_code op_code;
5446 tree comp_const = op1;
5448 int consts_equal, consts_lt;
5451 STRIP_SIGN_NOPS (arg0);
5453 op_code = TREE_CODE (arg0);
5454 minmax_const = TREE_OPERAND (arg0, 1);
5455 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5456 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5457 inner = TREE_OPERAND (arg0, 0);
5459 /* If something does not permit us to optimize, return the original tree. */
5460 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5461 || TREE_CODE (comp_const) != INTEGER_CST
5462 || TREE_CONSTANT_OVERFLOW (comp_const)
5463 || TREE_CODE (minmax_const) != INTEGER_CST
5464 || TREE_CONSTANT_OVERFLOW (minmax_const))
5467 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5468 and GT_EXPR, doing the rest with recursive calls using logical simplifications.  */
5472 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5474 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5477 return invert_truthvalue (tem);
5483 fold_build2 (TRUTH_ORIF_EXPR, type,
5484 optimize_minmax_comparison
5485 (EQ_EXPR, type, arg0, comp_const),
5486 optimize_minmax_comparison
5487 (GT_EXPR, type, arg0, comp_const));
5490 if (op_code == MAX_EXPR && consts_equal)
5491 /* MAX (X, 0) == 0 -> X <= 0 */
5492 return fold_build2 (LE_EXPR, type, inner, comp_const);
5494 else if (op_code == MAX_EXPR && consts_lt)
5495 /* MAX (X, 0) == 5 -> X == 5 */
5496 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5498 else if (op_code == MAX_EXPR)
5499 /* MAX (X, 0) == -1 -> false */
5500 return omit_one_operand (type, integer_zero_node, inner);
5502 else if (consts_equal)
5503 /* MIN (X, 0) == 0 -> X >= 0 */
5504 return fold_build2 (GE_EXPR, type, inner, comp_const);
5507 /* MIN (X, 0) == 5 -> false */
5508 return omit_one_operand (type, integer_zero_node, inner);
5511 /* MIN (X, 0) == -1 -> X == -1 */
5512 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5515 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5516 /* MAX (X, 0) > 0 -> X > 0
5517 MAX (X, 0) > 5 -> X > 5 */
5518 return fold_build2 (GT_EXPR, type, inner, comp_const);
5520 else if (op_code == MAX_EXPR)
5521 /* MAX (X, 0) > -1 -> true */
5522 return omit_one_operand (type, integer_one_node, inner);
5524 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5525 /* MIN (X, 0) > 0 -> false
5526 MIN (X, 0) > 5 -> false */
5527 return omit_one_operand (type, integer_zero_node, inner);
5530 /* MIN (X, 0) > -1 -> X > -1 */
5531 return fold_build2 (GT_EXPR, type, inner, comp_const);
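/* Editorial illustration (not GCC code): two of the identities above,
   verified by brute force on a small domain.  */
#if 0
#include <assert.h>

static void
check_minmax_compare (void)
{
  int x;
  for (x = -20; x <= 20; x++)
    {
      int max_x_0 = x > 0 ? x : 0;
      assert ((max_x_0 == 0) == (x <= 0));  /* MAX (X, 0) == 0 -> X <= 0 */
      assert ((max_x_0 > 5) == (x > 5));    /* MAX (X, 0) > 5  -> X > 5  */
    }
}
#endif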
5538 /* T is an integer expression that is being multiplied, divided, or taken a
5539 modulus (CODE says which and what kind of divide or modulus) by a
5540 constant C. See if we can eliminate that operation by folding it with
5541 other operations already in T. WIDE_TYPE, if non-null, is a type that
5542 should be used for the computation if wider than our type.
5544 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5545 (X * 2) + (Y * 4). We must, however, be assured that either the original
5546 expression would not overflow or that overflow is undefined for the type
5547 in the language in question.
5549 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5550 the machine has a multiply-accumulate insn or that this is part of an
5551 addressing calculation.
5553 If we return a non-null expression, it is an equivalent form of the
5554 original computation, but need not be in the original type.
5556 We set *STRICT_OVERFLOW_P to true if the return value depends on
5557 signed overflow being undefined. Otherwise we do not change
5558 *STRICT_OVERFLOW_P. */
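/* Editorial illustration (not GCC code): the example from the comment
   above, as the function below would rewrite it.  The identity is exact
   whenever no intermediate product overflows; with overflow it relies
   on signed overflow being undefined.  */
#if 0
#include <assert.h>

static void
check_extract_muldiv (void)
{
  long x, y;
  for (x = -100; x <= 100; x++)
    for (y = -100; y <= 100; y++)
      /* (X * 8 + Y * 16) / 4  ==  X * 2 + Y * 4, since the sum is
         exactly divisible by 4.  */
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
}
#endif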
5561 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5562 bool *strict_overflow_p)
5564 /* To avoid exponential search depth, refuse to allow recursion past
5565 three levels. Beyond that (1) it's highly unlikely that we'll find
5566 something interesting and (2) we've probably processed it before
5567 when we built the inner expression. */
5576 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5583 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5584 bool *strict_overflow_p)
5586 tree type = TREE_TYPE (t);
5587 enum tree_code tcode = TREE_CODE (t);
5588 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5589 > GET_MODE_SIZE (TYPE_MODE (type)))
5590 ? wide_type : type);
5592 int same_p = tcode == code;
5593 tree op0 = NULL_TREE, op1 = NULL_TREE;
5594 bool sub_strict_overflow_p;
5596 /* Don't deal with constants of zero here; they confuse the code below. */
5597 if (integer_zerop (c))
5600 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5601 op0 = TREE_OPERAND (t, 0);
5603 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5604 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5606 /* Note that we need not handle conditional operations here since fold
5607 already handles those cases. So just do arithmetic here. */
5611 /* For a constant, we can always simplify if we are a multiply
5612 or (for divide and modulus) if it is a multiple of our constant. */
5613 if (code == MULT_EXPR
5614 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5615 return const_binop (code, fold_convert (ctype, t),
5616 fold_convert (ctype, c), 0);
5619 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5620 /* If op0 is an expression ... */
5621 if ((COMPARISON_CLASS_P (op0)
5622 || UNARY_CLASS_P (op0)
5623 || BINARY_CLASS_P (op0)
5624 || EXPRESSION_CLASS_P (op0))
5625 /* ... and is unsigned, and its type is smaller than ctype,
5626 then we cannot pass through as widening. */
5627 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5628 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5629 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5630 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5631 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5632 /* ... or this is a truncation (t is narrower than op0),
5633 then we cannot pass through this narrowing. */
5634 || (GET_MODE_SIZE (TYPE_MODE (type))
5635 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5636 /* ... or signedness changes for division or modulus,
5637 then we cannot pass through this conversion. */
5638 || (code != MULT_EXPR
5639 && (TYPE_UNSIGNED (ctype)
5640 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5643 /* Pass the constant down and see if we can make a simplification. If
5644 we can, replace this expression with the inner simplification for
5645 possible later conversion to our or some other type. */
5646 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5647 && TREE_CODE (t2) == INTEGER_CST
5648 && ! TREE_CONSTANT_OVERFLOW (t2)
5649 && (0 != (t1 = extract_muldiv (op0, t2, code,
5651 ? ctype : NULL_TREE,
5652 strict_overflow_p))))
5657 /* If widening the type changes it from signed to unsigned, then we
5658 must avoid building ABS_EXPR itself as unsigned. */
5659 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5661 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5662 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5665 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5666 return fold_convert (ctype, t1);
5670 /* If the constant is negative, we cannot simplify this. */
5671 if (tree_int_cst_sgn (c) == -1)
5675 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5677 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5680 case MIN_EXPR: case MAX_EXPR:
5681 /* If widening the type changes the signedness, then we can't perform
5682 this optimization as that changes the result. */
5683 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5686 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5687 sub_strict_overflow_p = false;
5688 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5689 &sub_strict_overflow_p)) != 0
5690 && (t2 = extract_muldiv (op1, c, code, wide_type,
5691 &sub_strict_overflow_p)) != 0)
5693 if (tree_int_cst_sgn (c) < 0)
5694 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5695 if (sub_strict_overflow_p)
5696 *strict_overflow_p = true;
5697 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5698 fold_convert (ctype, t2));
5702 case LSHIFT_EXPR: case RSHIFT_EXPR:
5703 /* If the second operand is constant, this is a multiplication
5704 or floor division, by a power of two, so we can treat it that
5705 way unless the multiplier or divisor overflows. Signed
5706 left-shift overflow is implementation-defined rather than
5707 undefined in C90, so do not convert signed left shift into multiplication.  */
5709 if (TREE_CODE (op1) == INTEGER_CST
5710 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5711 /* const_binop may not detect overflow correctly,
5712 so check for it explicitly here. */
5713 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5714 && TREE_INT_CST_HIGH (op1) == 0
5715 && 0 != (t1 = fold_convert (ctype,
5716 const_binop (LSHIFT_EXPR,
5719 && ! TREE_OVERFLOW (t1))
5720 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5721 ? MULT_EXPR : FLOOR_DIV_EXPR,
5722 ctype, fold_convert (ctype, op0), t1),
5723 c, code, wide_type, strict_overflow_p);
5726 case PLUS_EXPR: case MINUS_EXPR:
5727 /* See if we can eliminate the operation on both sides. If we can, we
5728 can return a new PLUS or MINUS. If we can't, the only remaining
5729 cases where we can do anything are if the second operand is a constant.  */
5731 sub_strict_overflow_p = false;
5732 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5733 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5734 if (t1 != 0 && t2 != 0
5735 && (code == MULT_EXPR
5736 /* If not multiplication, we can only do this if both operands
5737 are divisible by c. */
5738 || (multiple_of_p (ctype, op0, c)
5739 && multiple_of_p (ctype, op1, c))))
5741 if (sub_strict_overflow_p)
5742 *strict_overflow_p = true;
5743 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5744 fold_convert (ctype, t2));
5747 /* If this was a subtraction, negate OP1 and set it to be an addition.
5748 This simplifies the logic below. */
5749 if (tcode == MINUS_EXPR)
5750 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5752 if (TREE_CODE (op1) != INTEGER_CST)
5755 /* If either OP1 or C are negative, this optimization is not safe for
5756 some of the division and remainder types while for others we need
5757 to change the code. */
5758 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5760 if (code == CEIL_DIV_EXPR)
5761 code = FLOOR_DIV_EXPR;
5762 else if (code == FLOOR_DIV_EXPR)
5763 code = CEIL_DIV_EXPR;
5764 else if (code != MULT_EXPR
5765 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5769 /* If it's a multiply or a division/modulus operation of a multiple
5770 of our constant, do the operation and verify it doesn't overflow. */
5771 if (code == MULT_EXPR
5772 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5774 op1 = const_binop (code, fold_convert (ctype, op1),
5775 fold_convert (ctype, c), 0);
5776 /* We allow the constant to overflow with wrapping semantics. */
5778 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5784 /* If we have an unsigned type that is not a sizetype, we cannot widen
5785 the operation since it will change the result if the original
5786 computation overflowed. */
5787 if (TYPE_UNSIGNED (ctype)
5788 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5792 /* If we were able to eliminate our operation from the first side,
5793 apply our operation to the second side and reform the PLUS. */
5794 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5795 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5797 /* The last case is if we are a multiply. In that case, we can
5798 apply the distributive law to commute the multiply and addition
5799 if the multiplication of the constants doesn't overflow. */
5800 if (code == MULT_EXPR)
5801 return fold_build2 (tcode, ctype,
5802 fold_build2 (code, ctype,
5803 fold_convert (ctype, op0),
5804 fold_convert (ctype, c)),
5810 /* We have a special case here if we are doing something like
5811 (C * 8) % 4 since we know that's zero. */
5812 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5813 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5814 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5815 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5816 return omit_one_operand (type, integer_zero_node, op0);
5818 /* ... fall through ... */
5820 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5821 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5822 /* If we can extract our operation from the LHS, do so and return a
5823 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5824 do something only if the second operand is a constant. */
5826 && (t1 = extract_muldiv (op0, c, code, wide_type,
5827 strict_overflow_p)) != 0)
5828 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5829 fold_convert (ctype, op1));
5830 else if (tcode == MULT_EXPR && code == MULT_EXPR
5831 && (t1 = extract_muldiv (op1, c, code, wide_type,
5832 strict_overflow_p)) != 0)
5833 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5834 fold_convert (ctype, t1));
5835 else if (TREE_CODE (op1) != INTEGER_CST)
5838 /* If these are the same operation types, we can associate them
5839 assuming no overflow. */
5841 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5842 fold_convert (ctype, c), 0))
5843 && ! TREE_OVERFLOW (t1))
5844 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5846 /* If these operations "cancel" each other, we have the main
5847 optimizations of this pass, which occur when either constant is a
5848 multiple of the other, in which case we replace this with either an
5849 operation of CODE or TCODE.
5851 If we have an unsigned type that is not a sizetype, we cannot do
5852 this since it will change the result if the original computation overflowed.  */
5854 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5855 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5856 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5857 || (tcode == MULT_EXPR
5858 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5859 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5861 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5863 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5864 *strict_overflow_p = true;
5865 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5866 fold_convert (ctype,
5867 const_binop (TRUNC_DIV_EXPR,
5870 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5872 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5873 *strict_overflow_p = true;
5874 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5875 fold_convert (ctype,
5876 const_binop (TRUNC_DIV_EXPR,
5889 /* Return a node which has the indicated constant VALUE (either 0 or
5890 1), and is of the indicated TYPE. */
5893 constant_boolean_node (int value, tree type)
5895 if (type == integer_type_node)
5896 return value ? integer_one_node : integer_zero_node;
5897 else if (type == boolean_type_node)
5898 return value ? boolean_true_node : boolean_false_node;
5900 return build_int_cst (type, value);
5904 /* Return true if expr looks like an ARRAY_REF and set base and
5905 offset to the appropriate trees. If there is no offset,
5906 offset is set to NULL_TREE. Base will be canonicalized to
5907 something you can get the element type from using
5908 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5909 in bytes to the base. */
5912 extract_array_ref (tree expr, tree *base, tree *offset)
5914 /* One canonical form is a PLUS_EXPR with the first
5915 argument being an ADDR_EXPR with a possible NOP_EXPR attached.  */
5917 if (TREE_CODE (expr) == PLUS_EXPR)
5919 tree op0 = TREE_OPERAND (expr, 0);
5920 tree inner_base, dummy1;
5921 /* Strip NOP_EXPRs here because the C front ends and/or
5922 folders may present us with (int *)&x.a + 4B. */
5924 if (extract_array_ref (op0, &inner_base, &dummy1))
5927 if (dummy1 == NULL_TREE)
5928 *offset = TREE_OPERAND (expr, 1);
5930 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5931 dummy1, TREE_OPERAND (expr, 1));
5935 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5936 which we transform into an ADDR_EXPR with appropriate
5937 offset. For other arguments to the ADDR_EXPR we assume
5938 zero offset and as such do not care about the ADDR_EXPR
5939 type and strip possible nops from it. */
5940 else if (TREE_CODE (expr) == ADDR_EXPR)
5942 tree op0 = TREE_OPERAND (expr, 0);
5943 if (TREE_CODE (op0) == ARRAY_REF)
5945 tree idx = TREE_OPERAND (op0, 1);
5946 *base = TREE_OPERAND (op0, 0);
5947 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5948 array_ref_element_size (op0));
5952 /* Handle array-to-pointer decay as &a. */
5953 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5954 *base = TREE_OPERAND (expr, 0);
5957 *offset = NULL_TREE;
5961 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5962 else if (SSA_VAR_P (expr)
5963 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5966 *offset = NULL_TREE;
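/* Editorial illustration (not GCC code): the decompositions recognized
   above, in source terms.  For &a[i] the base is `a' and the byte
   offset is i * sizeof (*a); for a plain pointer the offset is null.  */
#if 0
static int *
addr_of_element (int *a, int i)
{
  /* Decomposes as base `a', offset `i * sizeof (int)' bytes.  */
  return &a[i];
}
#endif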
5974 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5975 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5976 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5977 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5978 COND is the first argument to CODE; otherwise (as in the example
5979 given here), it is the second argument. TYPE is the type of the
5980 original expression. Return NULL_TREE if no simplification is possible.  */
5984 fold_binary_op_with_conditional_arg (enum tree_code code,
5985 tree type, tree op0, tree op1,
5986 tree cond, tree arg, int cond_first_p)
5988 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5989 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5990 tree test, true_value, false_value;
5991 tree lhs = NULL_TREE;
5992 tree rhs = NULL_TREE;
5994 /* This transformation is only worthwhile if we don't have to wrap
5995 arg in a SAVE_EXPR, and the operation can be simplified on at least
5996 one of the branches once it is pushed inside the COND_EXPR. */
5997 if (!TREE_CONSTANT (arg))
6000 if (TREE_CODE (cond) == COND_EXPR)
6002 test = TREE_OPERAND (cond, 0);
6003 true_value = TREE_OPERAND (cond, 1);
6004 false_value = TREE_OPERAND (cond, 2);
6005 /* If this operand is a void expression (such as a throw), it does not
6006 make sense to try to perform a logical or arithmetic operation involving it.  */
6008 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6010 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6015 tree testtype = TREE_TYPE (cond);
6017 true_value = constant_boolean_node (true, testtype);
6018 false_value = constant_boolean_node (false, testtype);
6021 arg = fold_convert (arg_type, arg);
6024 true_value = fold_convert (cond_type, true_value);
6026 lhs = fold_build2 (code, type, true_value, arg);
6028 lhs = fold_build2 (code, type, arg, true_value);
6032 false_value = fold_convert (cond_type, false_value);
6034 rhs = fold_build2 (code, type, false_value, arg);
6036 rhs = fold_build2 (code, type, arg, false_value);
6039 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6040 return fold_convert (type, test);
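/* Editorial illustration (not GCC code): the distribution performed
   above.  It is only a win when ARG is constant, since ARG is now
   duplicated into both arms.  */
#if 0
static int
distribute_into_cond (int b, int x, int y)
{
  /* 10 + (b ? x : y)  becomes  b ? 10 + x : 10 + y, where each arm
     may now fold further.  */
  return b ? 10 + x : 10 + y;
}
#endif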
6044 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6046 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6047 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6048 ADDEND is the same as X.
6050 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6051 and finite. The problematic cases are when X is zero, and its mode
6052 has signed zeros. In the case of rounding towards -infinity,
6053 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6054 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6057 fold_real_zero_addition_p (tree type, tree addend, int negate)
6059 if (!real_zerop (addend))
6062 /* Don't allow the fold with -fsignaling-nans. */
6063 if (HONOR_SNANS (TYPE_MODE (type)))
6066 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6067 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6070 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6071 if (TREE_CODE (addend) == REAL_CST
6072 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6075 /* The mode has signed zeros, and we have to honor their sign.
6076 In this situation, there is only one case we can return true for.
6077 X - 0 is the same as X unless rounding towards -infinity is
6079 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
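/* Editorial illustration (not GCC code): why X + 0.0 cannot be folded
   to X when signed zeros are honored.  Under IEEE round-to-nearest,
   -0.0 + 0.0 is +0.0, while -0.0 - 0.0 stays -0.0.  */
#if 0
#include <stdio.h>

static void
show_signed_zero (void)
{
  double x = -0.0;
  printf ("%g %g\n", x + 0.0, x - 0.0);   /* prints "0 -0" */
}
#endif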
6082 /* Subroutine of fold() that checks comparisons of built-in math
6083 functions against real constants.
6085 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6086 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6087 is the type of the result and ARG0 and ARG1 are the operands of the
6088 comparison. ARG1 must be a TREE_REAL_CST.
6090 The function returns the constant folded tree if a simplification
6091 can be made, and NULL_TREE otherwise. */
6094 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6095 tree type, tree arg0, tree arg1)
6099 if (BUILTIN_SQRT_P (fcode))
6101 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6102 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6104 c = TREE_REAL_CST (arg1);
6105 if (REAL_VALUE_NEGATIVE (c))
6107 /* sqrt(x) < y is always false, if y is negative. */
6108 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6109 return omit_one_operand (type, integer_zero_node, arg);
6111 /* sqrt(x) > y is always true, if y is negative and we
6112 don't care about NaNs, i.e. negative values of x. */
6113 if (code == NE_EXPR || !HONOR_NANS (mode))
6114 return omit_one_operand (type, integer_one_node, arg);
6116 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6117 return fold_build2 (GE_EXPR, type, arg,
6118 build_real (TREE_TYPE (arg), dconst0));
6120 else if (code == GT_EXPR || code == GE_EXPR)
6124 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6125 real_convert (&c2, mode, &c2);
6127 if (REAL_VALUE_ISINF (c2))
6129 /* sqrt(x) > y is x == +Inf, when y is very large. */
6130 if (HONOR_INFINITIES (mode))
6131 return fold_build2 (EQ_EXPR, type, arg,
6132 build_real (TREE_TYPE (arg), c2));
6134 /* sqrt(x) > y is always false, when y is very large
6135 and we don't care about infinities. */
6136 return omit_one_operand (type, integer_zero_node, arg);
6139 /* sqrt(x) > c is the same as x > c*c. */
6140 return fold_build2 (code, type, arg,
6141 build_real (TREE_TYPE (arg), c2));
6143 else if (code == LT_EXPR || code == LE_EXPR)
6147 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6148 real_convert (&c2, mode, &c2);
6150 if (REAL_VALUE_ISINF (c2))
6152 /* sqrt(x) < y is always true, when y is a very large
6153 value and we don't care about NaNs or Infinities. */
6154 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6155 return omit_one_operand (type, integer_one_node, arg);
6157 /* sqrt(x) < y is x != +Inf when y is very large and we
6158 don't care about NaNs. */
6159 if (! HONOR_NANS (mode))
6160 return fold_build2 (NE_EXPR, type, arg,
6161 build_real (TREE_TYPE (arg), c2));
6163 /* sqrt(x) < y is x >= 0 when y is very large and we
6164 don't care about Infinities. */
6165 if (! HONOR_INFINITIES (mode))
6166 return fold_build2 (GE_EXPR, type, arg,
6167 build_real (TREE_TYPE (arg), dconst0));
6169 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6170 if (lang_hooks.decls.global_bindings_p () != 0
6171 || CONTAINS_PLACEHOLDER_P (arg))
6174 arg = save_expr (arg);
6175 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6176 fold_build2 (GE_EXPR, type, arg,
6177 build_real (TREE_TYPE (arg),
6179 fold_build2 (NE_EXPR, type, arg,
6180 build_real (TREE_TYPE (arg),
6184 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6185 if (! HONOR_NANS (mode))
6186 return fold_build2 (code, type, arg,
6187 build_real (TREE_TYPE (arg), c2));
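/* Editorial illustration (not GCC code): the monotonicity identity the
   sqrt cases rest on.  For finite C >= 0 and X >= 0, sqrt (X) < C is
   the same as X < C*C, up to rounding of C*C at the boundary (here
   C*C == 9 is exact).  */
#if 0
#include <math.h>
#include <assert.h>

static void
check_sqrt_compare (void)
{
  double x, c = 3.0;
  for (x = 0.0; x <= 20.0; x += 0.25)
    assert ((sqrt (x) < c) == (x < c * c));
}
#endif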
6189 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6190 if (lang_hooks.decls.global_bindings_p () == 0
6191 && ! CONTAINS_PLACEHOLDER_P (arg))
6193 arg = save_expr (arg);
6194 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6195 fold_build2 (GE_EXPR, type, arg,
6196 build_real (TREE_TYPE (arg),
6198 fold_build2 (code, type, arg,
6199 build_real (TREE_TYPE (arg),
6208 /* Subroutine of fold() that optimizes comparisons against Infinities,
6209 either +Inf or -Inf.
6211 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6212 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6213 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6215 The function returns the constant folded tree if a simplification
6216 can be made, and NULL_TREE otherwise. */
6219 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6221 enum machine_mode mode;
6222 REAL_VALUE_TYPE max;
6226 mode = TYPE_MODE (TREE_TYPE (arg0));
6228 /* For negative infinity swap the sense of the comparison. */
6229 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6231 code = swap_tree_comparison (code);
6236 /* x > +Inf is always false, if we ignore sNaNs. */
6237 if (HONOR_SNANS (mode))
6239 return omit_one_operand (type, integer_zero_node, arg0);
6242 /* x <= +Inf is always true, if we don't care about NaNs. */
6243 if (! HONOR_NANS (mode))
6244 return omit_one_operand (type, integer_one_node, arg0);
6246 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6247 if (lang_hooks.decls.global_bindings_p () == 0
6248 && ! CONTAINS_PLACEHOLDER_P (arg0))
6250 arg0 = save_expr (arg0);
6251 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6257 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6258 real_maxval (&max, neg, mode);
6259 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6260 arg0, build_real (TREE_TYPE (arg0), max));
6263 /* x < +Inf is always equal to x <= DBL_MAX. */
6264 real_maxval (&max, neg, mode);
6265 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6266 arg0, build_real (TREE_TYPE (arg0), max));
6269 /* x != +Inf is always equal to !(x > DBL_MAX). */
6270 real_maxval (&max, neg, mode);
6271 if (! HONOR_NANS (mode))
6272 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6273 arg0, build_real (TREE_TYPE (arg0), max));
6275 /* The transformation below creates non-gimple code and thus is
6276 not appropriate if we are in gimple form. */
6280 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6281 arg0, build_real (TREE_TYPE (arg0), max));
6282 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
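/* Illustrative summary of the folds above, assuming IEEE double with
   NaNs honored (examples not from the original sources):

     x >  +Inf   becomes  0, unless sNaNs must be honored
     x <= +Inf   becomes  x == x, i.e. !isnan (x)
     x <  +Inf   becomes  x <= DBL_MAX
     x != +Inf   becomes  !(x > DBL_MAX)

   Comparisons against -Inf are handled by first swapping the sense of
   the comparison. */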
6291 /* Subroutine of fold() that optimizes comparisons of a division by
6292 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6295 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6296 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6297 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6299 The function returns the constant folded tree if a simplification
6300 can be made, and NULL_TREE otherwise. */
6303 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6305 tree prod, tmp, hi, lo;
6306 tree arg00 = TREE_OPERAND (arg0, 0);
6307 tree arg01 = TREE_OPERAND (arg0, 1);
6308 unsigned HOST_WIDE_INT lpart;
6309 HOST_WIDE_INT hpart;
6310 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6314 /* We have to do this the hard way to detect unsigned overflow.
6315 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6316 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6317 TREE_INT_CST_HIGH (arg01),
6318 TREE_INT_CST_LOW (arg1),
6319 TREE_INT_CST_HIGH (arg1),
6320 &lpart, &hpart, unsigned_p);
6321 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6322 prod = force_fit_type (prod, -1, overflow, false);
6323 neg_overflow = false;
6327 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6330 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6331 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6332 TREE_INT_CST_HIGH (prod),
6333 TREE_INT_CST_LOW (tmp),
6334 TREE_INT_CST_HIGH (tmp),
6335 &lpart, &hpart, unsigned_p);
6336 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6337 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6338 TREE_CONSTANT_OVERFLOW (prod));
6340 else if (tree_int_cst_sgn (arg01) >= 0)
6342 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6343 switch (tree_int_cst_sgn (arg1))
6346 neg_overflow = true;
6347 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6352 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6357 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6367 /* A negative divisor reverses the relational operators. */
6368 code = swap_tree_comparison (code);
6370 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6371 switch (tree_int_cst_sgn (arg1))
6374 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6379 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6384 neg_overflow = true;
6385 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6397 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6398 return omit_one_operand (type, integer_zero_node, arg00);
6399 if (TREE_OVERFLOW (hi))
6400 return fold_build2 (GE_EXPR, type, arg00, lo);
6401 if (TREE_OVERFLOW (lo))
6402 return fold_build2 (LE_EXPR, type, arg00, hi);
6403 return build_range_check (type, arg00, 1, lo, hi);
6406 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6407 return omit_one_operand (type, integer_one_node, arg00);
6408 if (TREE_OVERFLOW (hi))
6409 return fold_build2 (LT_EXPR, type, arg00, lo);
6410 if (TREE_OVERFLOW (lo))
6411 return fold_build2 (GT_EXPR, type, arg00, hi);
6412 return build_range_check (type, arg00, 0, lo, hi);
6415 if (TREE_OVERFLOW (lo))
6417 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6418 return omit_one_operand (type, tmp, arg00);
6420 return fold_build2 (LT_EXPR, type, arg00, lo);
6423 if (TREE_OVERFLOW (hi))
6425 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6426 return omit_one_operand (type, tmp, arg00);
6428 return fold_build2 (LE_EXPR, type, arg00, hi);
6431 if (TREE_OVERFLOW (hi))
6433 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6434 return omit_one_operand (type, tmp, arg00);
6436 return fold_build2 (GT_EXPR, type, arg00, hi);
6439 if (TREE_OVERFLOW (lo))
6441 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6442 return omit_one_operand (type, tmp, arg00);
6444 return fold_build2 (GE_EXPR, type, arg00, lo);
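/* A worked example of the range check built above (illustrative,
   assuming 32-bit int and C's truncating division): "x / 3 == 2"
   holds exactly for x in [6, 8], so prod = lo = 6, hi = 6 + 2 = 8,
   and the EQ_EXPR case returns
   build_range_check (type, x, 1, 6, 8). */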
6454 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6455 equality/inequality test, then return a simplified form of the test
6456 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6460 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6463 /* If this is testing a single bit, we can optimize the test. */
6464 if ((code == NE_EXPR || code == EQ_EXPR)
6465 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6466 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6468 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6469 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6470 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6472 if (arg00 != NULL_TREE
6473 /* This is only a win if casting to a signed type is cheap,
6474 i.e. when arg00's type is not a partial mode. */
6475 && TYPE_PRECISION (TREE_TYPE (arg00))
6476 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6478 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6479 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6480 result_type, fold_convert (stype, arg00),
6481 build_int_cst (stype, 0));
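/* For instance (illustrative): if A has 32-bit unsigned int type,
   "(A & 0x80000000) != 0" tests exactly the sign bit of the
   corresponding signed type, so it becomes "(int) A < 0", and
   "(A & 0x80000000) == 0" becomes "(int) A >= 0". */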
6488 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6489 equality/inequality test, then return a simplified form of
6490 the test using shifts and logical operations. Otherwise return
6491 NULL. TYPE is the desired result type. */
6494 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6497 /* If this is testing a single bit, we can optimize the test. */
6498 if ((code == NE_EXPR || code == EQ_EXPR)
6499 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6500 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6502 tree inner = TREE_OPERAND (arg0, 0);
6503 tree type = TREE_TYPE (arg0);
6504 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6505 enum machine_mode operand_mode = TYPE_MODE (type);
6507 tree signed_type, unsigned_type, intermediate_type;
6510 /* First, see if we can fold the single bit test into a sign-bit test. */
6512 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6517 /* Otherwise we have (A & C) != 0 where C is a single bit,
6518 convert that into ((A >> C2) & 1), where C2 = log2(C).
6519 Similarly for (A & C) == 0. */
6521 /* If INNER is a right shift of a constant and it plus BITNUM does
6522 not overflow, adjust BITNUM and INNER. */
6523 if (TREE_CODE (inner) == RSHIFT_EXPR
6524 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6525 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6526 && bitnum < TYPE_PRECISION (type)
6527 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6528 bitnum - TYPE_PRECISION (type)))
6530 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6531 inner = TREE_OPERAND (inner, 0);
6534 /* If we are going to be able to omit the AND below, we must do our
6535 operations as unsigned. If we must use the AND, we have a choice.
6536 Normally unsigned is faster, but for some machines signed is. */
6537 #ifdef LOAD_EXTEND_OP
6538 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6539 && !flag_syntax_only) ? 0 : 1;
6544 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6545 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6546 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6547 inner = fold_convert (intermediate_type, inner);
6550 inner = build2 (RSHIFT_EXPR, intermediate_type,
6551 inner, size_int (bitnum));
6553 if (code == EQ_EXPR)
6554 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6555 inner, integer_one_node);
6557 /* Put the AND last so it can combine with more things. */
6558 inner = build2 (BIT_AND_EXPR, intermediate_type,
6559 inner, integer_one_node);
6561 /* Make sure to return the proper type. */
6562 inner = fold_convert (result_type, inner);
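/* Illustrative example: "(A & 8) != 0" is rewritten by the code above
   as "(A >> 3) & 1" in the chosen intermediate type, while
   "(A & 8) == 0" additionally XORs the shifted bit with 1 before the
   final AND. */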
6569 /* Check whether we are allowed to reorder operands arg0 and arg1,
6570 such that the evaluation of arg1 occurs before arg0. */
6573 reorder_operands_p (tree arg0, tree arg1)
6575 if (! flag_evaluation_order)
6577 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6579 return ! TREE_SIDE_EFFECTS (arg0)
6580 && ! TREE_SIDE_EFFECTS (arg1);
6583 /* Test whether it is preferable to swap two operands, ARG0 and
6584 ARG1, for example because ARG0 is an integer constant and ARG1
6585 isn't. If REORDER is true, only recommend swapping if we can
6586 evaluate the operands in reverse order. */
6589 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6591 STRIP_SIGN_NOPS (arg0);
6592 STRIP_SIGN_NOPS (arg1);
6594 if (TREE_CODE (arg1) == INTEGER_CST)
6596 if (TREE_CODE (arg0) == INTEGER_CST)
6599 if (TREE_CODE (arg1) == REAL_CST)
6601 if (TREE_CODE (arg0) == REAL_CST)
6604 if (TREE_CODE (arg1) == COMPLEX_CST)
6606 if (TREE_CODE (arg0) == COMPLEX_CST)
6609 if (TREE_CONSTANT (arg1))
6611 if (TREE_CONSTANT (arg0))
6617 if (reorder && flag_evaluation_order
6618 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6626 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6627 for commutative and comparison operators. Ensuring a canonical
6628 form allows the optimizers to find additional redundancies without
6629 having to explicitly check for both orderings. */
6630 if (TREE_CODE (arg0) == SSA_NAME
6631 && TREE_CODE (arg1) == SSA_NAME
6632 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6638 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6639 ARG0 is extended to a wider type. */
6642 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6644 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6646 tree shorter_type, outer_type;
6650 if (arg0_unw == arg0)
6652 shorter_type = TREE_TYPE (arg0_unw);
6654 #ifdef HAVE_canonicalize_funcptr_for_compare
6655 /* Disable this optimization if we're casting a function pointer
6656 type on targets that require function pointer canonicalization. */
6657 if (HAVE_canonicalize_funcptr_for_compare
6658 && TREE_CODE (shorter_type) == POINTER_TYPE
6659 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6663 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6666 arg1_unw = get_unwidened (arg1, NULL_TREE);
6668 /* If possible, express the comparison in the shorter mode. */
6669 if ((code == EQ_EXPR || code == NE_EXPR
6670 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6671 && (TREE_TYPE (arg1_unw) == shorter_type
6672 || (TYPE_PRECISION (shorter_type)
6673 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6674 || (TREE_CODE (arg1_unw) == INTEGER_CST
6675 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6676 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6677 && int_fits_type_p (arg1_unw, shorter_type))))
6678 return fold_build2 (code, type, arg0_unw,
6679 fold_convert (shorter_type, arg1_unw));
6681 if (TREE_CODE (arg1_unw) != INTEGER_CST
6682 || TREE_CODE (shorter_type) != INTEGER_TYPE
6683 || !int_fits_type_p (arg1_unw, shorter_type))
6686 /* If we are comparing with an integer that does not fit into the range
6687 of the shorter type, the result is known. */
6688 outer_type = TREE_TYPE (arg1_unw);
6689 min = lower_bound_in_type (outer_type, shorter_type);
6690 max = upper_bound_in_type (outer_type, shorter_type);
6692 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6694 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6701 return omit_one_operand (type, integer_zero_node, arg0);
6706 return omit_one_operand (type, integer_one_node, arg0);
6712 return omit_one_operand (type, integer_one_node, arg0);
6714 return omit_one_operand (type, integer_zero_node, arg0);
6719 return omit_one_operand (type, integer_zero_node, arg0);
6721 return omit_one_operand (type, integer_one_node, arg0);
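/* Illustrative examples, assuming 16-bit short and 32-bit int (not
   from the original sources): "(int) s == 1000" is narrowed to
   "s == (short) 1000" because the constant fits the shorter type,
   whereas "(int) s == 100000" folds directly to 0, since 100000 lies
   above the upper bound computed above. */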
6730 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6731 ARG0 just the signedness is changed. */
6734 fold_sign_changed_comparison (enum tree_code code, tree type,
6735 tree arg0, tree arg1)
6737 tree arg0_inner, tmp;
6738 tree inner_type, outer_type;
6740 if (TREE_CODE (arg0) != NOP_EXPR
6741 && TREE_CODE (arg0) != CONVERT_EXPR)
6744 outer_type = TREE_TYPE (arg0);
6745 arg0_inner = TREE_OPERAND (arg0, 0);
6746 inner_type = TREE_TYPE (arg0_inner);
6748 #ifdef HAVE_canonicalize_funcptr_for_compare
6749 /* Disable this optimization if we're casting a function pointer
6750 type on targets that require function pointer canonicalization. */
6751 if (HAVE_canonicalize_funcptr_for_compare
6752 && TREE_CODE (inner_type) == POINTER_TYPE
6753 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6757 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6760 if (TREE_CODE (arg1) != INTEGER_CST
6761 && !((TREE_CODE (arg1) == NOP_EXPR
6762 || TREE_CODE (arg1) == CONVERT_EXPR)
6763 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6766 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6771 if (TREE_CODE (arg1) == INTEGER_CST)
6773 tmp = build_int_cst_wide (inner_type,
6774 TREE_INT_CST_LOW (arg1),
6775 TREE_INT_CST_HIGH (arg1));
6776 arg1 = force_fit_type (tmp, 0,
6777 TREE_OVERFLOW (arg1),
6778 TREE_CONSTANT_OVERFLOW (arg1));
6781 arg1 = fold_convert (inner_type, arg1);
6783 return fold_build2 (code, type, arg0_inner, arg1);
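/* For example (illustrative): with 32-bit int, "(unsigned int) i == 5u"
   changes only the signedness of i, so it is folded to "i == 5", the
   constant having been refitted into the inner type by force_fit_type
   above. */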
6786 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6787 the step of the array. Reconstructs s and delta in the case of s * delta
6788 being an integer constant (and thus already folded).
6789 ADDR is the address. OP1 is the multiplicative expression.
6790 If the function succeeds, the new address expression is returned. Otherwise
6791 NULL_TREE is returned. */
6794 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6796 tree s, delta, step;
6797 tree ref = TREE_OPERAND (addr, 0), pref;
6801 /* Canonicalize op1 into a possibly non-constant delta
6802 and an INTEGER_CST s. */
6803 if (TREE_CODE (op1) == MULT_EXPR)
6805 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6810 if (TREE_CODE (arg0) == INTEGER_CST)
6815 else if (TREE_CODE (arg1) == INTEGER_CST)
6823 else if (TREE_CODE (op1) == INTEGER_CST)
6830 /* Pretend op1 is delta * 1. */
6832 s = integer_one_node;
6835 for (;; ref = TREE_OPERAND (ref, 0))
6837 if (TREE_CODE (ref) == ARRAY_REF)
6839 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6843 step = array_ref_element_size (ref);
6844 if (TREE_CODE (step) != INTEGER_CST)
6849 if (! tree_int_cst_equal (step, s))
6854 /* Check whether delta is a multiple of step. */
6855 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6864 if (!handled_component_p (ref))
6868 /* We found a suitable array reference, so copy everything up to it
6869 and replace the index. */
6871 pref = TREE_OPERAND (addr, 0);
6872 ret = copy_node (pref);
6877 pref = TREE_OPERAND (pref, 0);
6878 TREE_OPERAND (pos, 0) = copy_node (pref);
6879 pos = TREE_OPERAND (pos, 0);
6882 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6883 fold_convert (itype,
6884 TREE_OPERAND (pos, 1)),
6885 fold_convert (itype, delta));
6887 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
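/* Illustrative example: given "int a[10];" with 4-byte int, the
   internal address tree "&a[3] + d * 4" (a byte offset, 4 being the
   array step) is rewritten by the code above as "&a[3 + d]", so the
   offset stays in the index where further folding can see it. */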
6891 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6892 means A >= Y && A != MAX, but in this case we know that
6893 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6896 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6898 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6900 if (TREE_CODE (bound) == LT_EXPR)
6901 a = TREE_OPERAND (bound, 0);
6902 else if (TREE_CODE (bound) == GT_EXPR)
6903 a = TREE_OPERAND (bound, 1);
6907 typea = TREE_TYPE (a);
6908 if (!INTEGRAL_TYPE_P (typea)
6909 && !POINTER_TYPE_P (typea))
6912 if (TREE_CODE (ineq) == LT_EXPR)
6914 a1 = TREE_OPERAND (ineq, 1);
6915 y = TREE_OPERAND (ineq, 0);
6917 else if (TREE_CODE (ineq) == GT_EXPR)
6919 a1 = TREE_OPERAND (ineq, 0);
6920 y = TREE_OPERAND (ineq, 1);
6925 if (TREE_TYPE (a1) != typea)
6928 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6929 if (!integer_onep (diff))
6932 return fold_build2 (GE_EXPR, type, a, y);
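/* A concrete instance (illustrative): given BOUND "a < x" and INEQ
   "a + 1 > y", the function returns "a >= y"; the bound guarantees
   a < x <= MAX, so a + 1 cannot wrap around. */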
6935 /* Fold a sum or difference of at least one multiplication.
6936 Returns the folded tree or NULL if no simplification could be made. */
6939 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6941 tree arg00, arg01, arg10, arg11;
6942 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6944 /* (A * C) +- (B * C) -> (A+-B) * C.
6945 (A * C) +- A -> A * (C+-1).
6946 We are most concerned about the case where C is a constant,
6947 but other combinations show up during loop reduction. Since
6948 it is not difficult, try all four possibilities. */
6950 if (TREE_CODE (arg0) == MULT_EXPR)
6952 arg00 = TREE_OPERAND (arg0, 0);
6953 arg01 = TREE_OPERAND (arg0, 1);
6958 arg01 = build_one_cst (type);
6960 if (TREE_CODE (arg1) == MULT_EXPR)
6962 arg10 = TREE_OPERAND (arg1, 0);
6963 arg11 = TREE_OPERAND (arg1, 1);
6968 arg11 = build_one_cst (type);
6972 if (operand_equal_p (arg01, arg11, 0))
6973 same = arg01, alt0 = arg00, alt1 = arg10;
6974 else if (operand_equal_p (arg00, arg10, 0))
6975 same = arg00, alt0 = arg01, alt1 = arg11;
6976 else if (operand_equal_p (arg00, arg11, 0))
6977 same = arg00, alt0 = arg01, alt1 = arg10;
6978 else if (operand_equal_p (arg01, arg10, 0))
6979 same = arg01, alt0 = arg00, alt1 = arg11;
6981 /* No identical multiplicands; see if we can find a common
6982 power-of-two factor in non-power-of-two multiplies. This
6983 can help in multi-dimensional array access. */
6984 else if (host_integerp (arg01, 0)
6985 && host_integerp (arg11, 0))
6987 HOST_WIDE_INT int01, int11, tmp;
6990 int01 = TREE_INT_CST_LOW (arg01);
6991 int11 = TREE_INT_CST_LOW (arg11);
6993 /* Move min of absolute values to int11. */
6994 if ((int01 >= 0 ? int01 : -int01)
6995 < (int11 >= 0 ? int11 : -int11))
6997 tmp = int01, int01 = int11, int11 = tmp;
6998 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7005 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7007 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7008 build_int_cst (TREE_TYPE (arg00),
7013 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7018 return fold_build2 (MULT_EXPR, type,
7019 fold_build2 (code, type,
7020 fold_convert (type, alt0),
7021 fold_convert (type, alt1)),
7022 fold_convert (type, same));
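/* Illustrative instances of the folds above (not from the original
   sources):

     a*c + b*c    ->  (a + b) * c      (identical multiplicands)
     a*7 + a      ->  a * (7 + 1)      (the (A * C) +- A case)
     i*12 + j*4   ->  (i*3 + j) * 4    (common power-of-two factor) */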
7027 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7028 specified by EXPR into the buffer PTR of length LEN bytes.
7029 Return the number of bytes placed in the buffer, or zero upon failure. */
7033 native_encode_int (tree expr, unsigned char *ptr, int len)
7035 tree type = TREE_TYPE (expr);
7036 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7037 int byte, offset, word, words;
7038 unsigned char value;
7040 if (total_bytes > len)
7042 words = total_bytes / UNITS_PER_WORD;
7044 for (byte = 0; byte < total_bytes; byte++)
7046 int bitpos = byte * BITS_PER_UNIT;
7047 if (bitpos < HOST_BITS_PER_WIDE_INT)
7048 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7050 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7051 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7053 if (total_bytes > UNITS_PER_WORD)
7055 word = byte / UNITS_PER_WORD;
7056 if (WORDS_BIG_ENDIAN)
7057 word = (words - 1) - word;
7058 offset = word * UNITS_PER_WORD;
7059 if (BYTES_BIG_ENDIAN)
7060 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7062 offset += byte % UNITS_PER_WORD;
7065 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7066 ptr[offset] = value;
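/* Worked example (illustrative): encoding a 32-bit INTEGER_CST with
   value 0x11223344 stores the bytes 44 33 22 11 at ptr[0..3] on a
   little-endian target, and 11 22 33 44 on a big-endian one. */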
7072 /* Subroutine of native_encode_expr. Encode the REAL_CST
7073 specified by EXPR into the buffer PTR of length LEN bytes.
7074 Return the number of bytes placed in the buffer, or zero upon failure. */
7078 native_encode_real (tree expr, unsigned char *ptr, int len)
7080 tree type = TREE_TYPE (expr);
7081 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7082 int byte, offset, word, words, bitpos;
7083 unsigned char value;
7085 /* There are always 32 bits in each long, no matter the size of
7086 the host's long. We handle floating point representations with up to 192 bits. */
7090 if (total_bytes > len)
7092 words = 32 / UNITS_PER_WORD;
7094 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7096 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7097 bitpos += BITS_PER_UNIT)
7099 byte = (bitpos / BITS_PER_UNIT) & 3;
7100 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7102 if (UNITS_PER_WORD < 4)
7104 word = byte / UNITS_PER_WORD;
7105 if (WORDS_BIG_ENDIAN)
7106 word = (words - 1) - word;
7107 offset = word * UNITS_PER_WORD;
7108 if (BYTES_BIG_ENDIAN)
7109 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7111 offset += byte % UNITS_PER_WORD;
7114 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7115 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7120 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7121 specified by EXPR into the buffer PTR of length LEN bytes.
7122 Return the number of bytes placed in the buffer, or zero upon failure. */
7126 native_encode_complex (tree expr, unsigned char *ptr, int len)
7131 part = TREE_REALPART (expr);
7132 rsize = native_encode_expr (part, ptr, len);
7135 part = TREE_IMAGPART (expr);
7136 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7139 return rsize + isize;
7143 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7144 specified by EXPR into the buffer PTR of length LEN bytes.
7145 Return the number of bytes placed in the buffer, or zero upon failure. */
7149 native_encode_vector (tree expr, unsigned char *ptr, int len)
7151 int i, size, offset, count;
7152 tree itype, elem, elements;
7155 elements = TREE_VECTOR_CST_ELTS (expr);
7156 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7157 itype = TREE_TYPE (TREE_TYPE (expr));
7158 size = GET_MODE_SIZE (TYPE_MODE (itype));
7159 for (i = 0; i < count; i++)
7163 elem = TREE_VALUE (elements);
7164 elements = TREE_CHAIN (elements);
7171 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7176 if (offset + size > len)
7178 memset (ptr+offset, 0, size);
7186 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7187 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7188 buffer PTR of length LEN bytes. Return the number of bytes
7189 placed in the buffer, or zero upon failure. */
7192 native_encode_expr (tree expr, unsigned char *ptr, int len)
7194 switch (TREE_CODE (expr))
7197 return native_encode_int (expr, ptr, len);
7200 return native_encode_real (expr, ptr, len);
7203 return native_encode_complex (expr, ptr, len);
7206 return native_encode_vector (expr, ptr, len);
7214 /* Subroutine of native_interpret_expr. Interpret the contents of
7215 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7216 If the buffer cannot be interpreted, return NULL_TREE. */
7219 native_interpret_int (tree type, unsigned char *ptr, int len)
7221 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7222 int byte, offset, word, words;
7223 unsigned char value;
7224 unsigned HOST_WIDE_INT lo = 0;
7225 HOST_WIDE_INT hi = 0;
7227 if (total_bytes > len)
7229 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7231 words = total_bytes / UNITS_PER_WORD;
7233 for (byte = 0; byte < total_bytes; byte++)
7235 int bitpos = byte * BITS_PER_UNIT;
7236 if (total_bytes > UNITS_PER_WORD)
7238 word = byte / UNITS_PER_WORD;
7239 if (WORDS_BIG_ENDIAN)
7240 word = (words - 1) - word;
7241 offset = word * UNITS_PER_WORD;
7242 if (BYTES_BIG_ENDIAN)
7243 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7245 offset += byte % UNITS_PER_WORD;
7248 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7249 value = ptr[offset];
7251 if (bitpos < HOST_BITS_PER_WIDE_INT)
7252 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7254 hi |= (unsigned HOST_WIDE_INT) value
7255 << (bitpos - HOST_BITS_PER_WIDE_INT);
7258 return force_fit_type (build_int_cst_wide (type, lo, hi),
7263 /* Subroutine of native_interpret_expr. Interpret the contents of
7264 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7265 If the buffer cannot be interpreted, return NULL_TREE. */
7268 native_interpret_real (tree type, unsigned char *ptr, int len)
7270 enum machine_mode mode = TYPE_MODE (type);
7271 int total_bytes = GET_MODE_SIZE (mode);
7272 int byte, offset, word, words, bitpos;
7273 unsigned char value;
7274 /* There are always 32 bits in each long, no matter the size of
7275 the host's long. We handle floating point representations with up to 192 bits. */
7280 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7281 if (total_bytes > len || total_bytes > 24)
7283 words = 32 / UNITS_PER_WORD;
7285 memset (tmp, 0, sizeof (tmp));
7286 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7287 bitpos += BITS_PER_UNIT)
7289 byte = (bitpos / BITS_PER_UNIT) & 3;
7290 if (UNITS_PER_WORD < 4)
7292 word = byte / UNITS_PER_WORD;
7293 if (WORDS_BIG_ENDIAN)
7294 word = (words - 1) - word;
7295 offset = word * UNITS_PER_WORD;
7296 if (BYTES_BIG_ENDIAN)
7297 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7299 offset += byte % UNITS_PER_WORD;
7302 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7303 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7305 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7308 real_from_target (&r, tmp, mode);
7309 return build_real (type, r);
7313 /* Subroutine of native_interpret_expr. Interpret the contents of
7314 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7315 If the buffer cannot be interpreted, return NULL_TREE. */
7318 native_interpret_complex (tree type, unsigned char *ptr, int len)
7320 tree etype, rpart, ipart;
7323 etype = TREE_TYPE (type);
7324 size = GET_MODE_SIZE (TYPE_MODE (etype));
7327 rpart = native_interpret_expr (etype, ptr, size);
7330 ipart = native_interpret_expr (etype, ptr+size, size);
7333 return build_complex (type, rpart, ipart);
7337 /* Subroutine of native_interpret_expr. Interpret the contents of
7338 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7339 If the buffer cannot be interpreted, return NULL_TREE. */
7342 native_interpret_vector (tree type, unsigned char *ptr, int len)
7344 tree etype, elem, elements;
7347 etype = TREE_TYPE (type);
7348 size = GET_MODE_SIZE (TYPE_MODE (etype));
7349 count = TYPE_VECTOR_SUBPARTS (type);
7350 if (size * count > len)
7353 elements = NULL_TREE;
7354 for (i = count - 1; i >= 0; i--)
7356 elem = native_interpret_expr (etype, ptr+(i*size), size);
7359 elements = tree_cons (NULL_TREE, elem, elements);
7361 return build_vector (type, elements);
7365 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7366 the buffer PTR of length LEN as a constant of type TYPE. For
7367 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7368 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7369 return NULL_TREE. */
7372 native_interpret_expr (tree type, unsigned char *ptr, int len)
7374 switch (TREE_CODE (type))
7379 return native_interpret_int (type, ptr, len);
7382 return native_interpret_real (type, ptr, len);
7385 return native_interpret_complex (type, ptr, len);
7388 return native_interpret_vector (type, ptr, len);
7396 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7397 TYPE at compile-time. If we're unable to perform the conversion
7398 return NULL_TREE. */
7401 fold_view_convert_expr (tree type, tree expr)
7403 /* We support up to 512-bit values (for V8DFmode). */
7404 unsigned char buffer[64];
7407 /* Check that the host and target are sane. */
7408 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7411 len = native_encode_expr (expr, buffer, sizeof (buffer));
7415 return native_interpret_expr (type, buffer, len);
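/* Illustrative use (not from the original sources): folding
   VIEW_CONVERT_EXPR<int>(1.0f) on a little-endian IEEE target encodes
   the REAL_CST as the bytes 00 00 80 3f and reinterprets them,
   yielding the INTEGER_CST 0x3f800000. */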
7419 /* Fold a unary expression of code CODE and type TYPE with operand
7420 OP0. Return the folded expression if folding is successful.
7421 Otherwise, return NULL_TREE. */
7424 fold_unary (enum tree_code code, tree type, tree op0)
7428 enum tree_code_class kind = TREE_CODE_CLASS (code);
7430 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7431 && TREE_CODE_LENGTH (code) == 1);
7436 if (code == NOP_EXPR || code == CONVERT_EXPR
7437 || code == FLOAT_EXPR || code == ABS_EXPR)
7439 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7441 STRIP_SIGN_NOPS (arg0);
7445 /* Strip any conversions that don't change the mode. This
7446 is safe for every expression, except for a comparison
7447 expression because its signedness is derived from its operands.
7450 Note that this is done as an internal manipulation within
7451 the constant folder, in order to find the simplest
7452 representation of the arguments so that their form can be
7453 studied. In any case, the appropriate type conversions
7454 should be put back in the tree that will get out of the constant folder. */
7460 if (TREE_CODE_CLASS (code) == tcc_unary)
7462 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7463 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7464 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7465 else if (TREE_CODE (arg0) == COND_EXPR)
7467 tree arg01 = TREE_OPERAND (arg0, 1);
7468 tree arg02 = TREE_OPERAND (arg0, 2);
7469 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7470 arg01 = fold_build1 (code, type, arg01);
7471 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7472 arg02 = fold_build1 (code, type, arg02);
7473 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7476 /* If this was a conversion, and all we did was move it
7477 inside the COND_EXPR, bring it back out. But leave it if
7478 it is a conversion from integer to integer and the
7479 result precision is no wider than a word since such a
7480 conversion is cheap and may be optimized away by combine,
7481 while it couldn't if it were outside the COND_EXPR. Then return
7482 so we don't get into an infinite recursion loop taking the
7483 conversion out and then back in. */
7485 if ((code == NOP_EXPR || code == CONVERT_EXPR
7486 || code == NON_LVALUE_EXPR)
7487 && TREE_CODE (tem) == COND_EXPR
7488 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7489 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7490 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7491 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7492 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7493 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7494 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7496 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7497 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7498 || flag_syntax_only))
7499 tem = build1 (code, type,
7501 TREE_TYPE (TREE_OPERAND
7502 (TREE_OPERAND (tem, 1), 0)),
7503 TREE_OPERAND (tem, 0),
7504 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7505 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7508 else if (COMPARISON_CLASS_P (arg0))
7510 if (TREE_CODE (type) == BOOLEAN_TYPE)
7512 arg0 = copy_node (arg0);
7513 TREE_TYPE (arg0) = type;
7516 else if (TREE_CODE (type) != INTEGER_TYPE)
7517 return fold_build3 (COND_EXPR, type, arg0,
7518 fold_build1 (code, type,
7520 fold_build1 (code, type,
7521 integer_zero_node));
7530 case FIX_TRUNC_EXPR:
7532 case FIX_FLOOR_EXPR:
7533 case FIX_ROUND_EXPR:
7534 if (TREE_TYPE (op0) == type)
7537 /* If we have (type) (a CMP b) and type is an integral type, return
7538 new expression involving the new type. */
7539 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7540 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7541 TREE_OPERAND (op0, 1));
7543 /* Handle cases of two conversions in a row. */
7544 if (TREE_CODE (op0) == NOP_EXPR
7545 || TREE_CODE (op0) == CONVERT_EXPR)
7547 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7548 tree inter_type = TREE_TYPE (op0);
7549 int inside_int = INTEGRAL_TYPE_P (inside_type);
7550 int inside_ptr = POINTER_TYPE_P (inside_type);
7551 int inside_float = FLOAT_TYPE_P (inside_type);
7552 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7553 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7554 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7555 int inter_int = INTEGRAL_TYPE_P (inter_type);
7556 int inter_ptr = POINTER_TYPE_P (inter_type);
7557 int inter_float = FLOAT_TYPE_P (inter_type);
7558 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7559 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7560 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7561 int final_int = INTEGRAL_TYPE_P (type);
7562 int final_ptr = POINTER_TYPE_P (type);
7563 int final_float = FLOAT_TYPE_P (type);
7564 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7565 unsigned int final_prec = TYPE_PRECISION (type);
7566 int final_unsignedp = TYPE_UNSIGNED (type);
7568 /* In addition to the cases of two conversions in a row
7569 handled below, if we are converting something to its own
7570 type via an object of identical or wider precision, neither
7571 conversion is needed. */
7572 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7573 && (((inter_int || inter_ptr) && final_int)
7574 || (inter_float && final_float))
7575 && inter_prec >= final_prec)
7576 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7578 /* Likewise, if the intermediate and final types are either both
7579 float or both integer, we don't need the middle conversion if
7580 it is wider than the final type and doesn't change the signedness
7581 (for integers). Avoid this if the final type is a pointer
7582 since then we sometimes need the inner conversion. Likewise if
7583 the outer has a precision not equal to the size of its mode. */
7584 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7585 || (inter_float && inside_float)
7586 || (inter_vec && inside_vec))
7587 && inter_prec >= inside_prec
7588 && (inter_float || inter_vec
7589 || inter_unsignedp == inside_unsignedp)
7590 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7591 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7593 && (! final_vec || inter_prec == inside_prec))
7594 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7596 /* If we have a sign-extension of a zero-extended value, we can
7597 replace that by a single zero-extension. */
7598 if (inside_int && inter_int && final_int
7599 && inside_prec < inter_prec && inter_prec < final_prec
7600 && inside_unsignedp && !inter_unsignedp)
7601 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7603 /* Two conversions in a row are not needed unless:
7604 - some conversion is floating-point (overstrict for now), or
7605 - some conversion is a vector (overstrict for now), or
7606 - the intermediate type is narrower than both initial and final, or
7608 - the intermediate type and innermost type differ in signedness,
7609 and the outermost type is wider than the intermediate, or
7610 - the initial type is a pointer type and the precisions of the
7611 intermediate and final types differ, or
7612 - the final type is a pointer type and the precisions of the
7613 initial and intermediate types differ.
7614 - the final type is a pointer type and the initial type is not, or
7615 - the initial type is a pointer to an array and the final type is not. */
7617 /* Java pointer type conversions generate checks in some
7618 cases, so we explicitly disallow this optimization. */
7619 if (! inside_float && ! inter_float && ! final_float
7620 && ! inside_vec && ! inter_vec && ! final_vec
7621 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7622 && ! (inside_int && inter_int
7623 && inter_unsignedp != inside_unsignedp
7624 && inter_prec < final_prec)
7625 && ((inter_unsignedp && inter_prec > inside_prec)
7626 == (final_unsignedp && final_prec > inter_prec))
7627 && ! (inside_ptr && inter_prec != final_prec)
7628 && ! (final_ptr && inside_prec != inter_prec)
7629 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7630 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7631 && final_ptr == inside_ptr
7633 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7634 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7635 && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7637 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
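/* Illustrative instances, assuming 8-bit signed char, 32-bit int and
   64-bit long long (not from the original sources):
   "(signed char) (int) ll" folds to "(signed char) ll", as none of the
   exceptions listed above applies, whereas "(unsigned) (signed char) i"
   must be kept, because the intermediate type is narrower than both
   the initial and final types. */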
7640 /* Handle (T *)&A.B.C for A being of type T and B and C
7641 living at offset zero. This occurs frequently in
7642 C++ upcasting and then accessing the base. */
7643 if (TREE_CODE (op0) == ADDR_EXPR
7644 && POINTER_TYPE_P (type)
7645 && handled_component_p (TREE_OPERAND (op0, 0)))
7647 HOST_WIDE_INT bitsize, bitpos;
7649 enum machine_mode mode;
7650 int unsignedp, volatilep;
7651 tree base = TREE_OPERAND (op0, 0);
7652 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7653 &mode, &unsignedp, &volatilep, false);
7654 /* If the reference was to a (constant) zero offset, we can use
7655 the address of the base if it has the same base type
7656 as the result type. */
7657 if (! offset && bitpos == 0
7658 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7659 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7660 return fold_convert (type, build_fold_addr_expr (base));
7663 if (TREE_CODE (op0) == MODIFY_EXPR
7664 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7665 /* Detect assigning a bitfield. */
7666 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7667 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7669 /* Don't leave an assignment inside a conversion
7670 unless assigning a bitfield. */
7671 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7672 /* First do the assignment, then return converted constant. */
7673 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7674 TREE_NO_WARNING (tem) = 1;
7675 TREE_USED (tem) = 1;
7679 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7680 constant (if x has signed type, the sign bit cannot be set
7681 in c). This folds extension into the BIT_AND_EXPR. */
7682 if (INTEGRAL_TYPE_P (type)
7683 && TREE_CODE (type) != BOOLEAN_TYPE
7684 && TREE_CODE (op0) == BIT_AND_EXPR
7685 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7688 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7691 if (TYPE_UNSIGNED (TREE_TYPE (and))
7692 || (TYPE_PRECISION (type)
7693 <= TYPE_PRECISION (TREE_TYPE (and))))
7695 else if (TYPE_PRECISION (TREE_TYPE (and1))
7696 <= HOST_BITS_PER_WIDE_INT
7697 && host_integerp (and1, 1))
7699 unsigned HOST_WIDE_INT cst;
7701 cst = tree_low_cst (and1, 1);
7702 cst &= (HOST_WIDE_INT) -1
7703 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7704 change = (cst == 0);
7705 #ifdef LOAD_EXTEND_OP
7707 && !flag_syntax_only
7708 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7711 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7712 and0 = fold_convert (uns, and0);
7713 and1 = fold_convert (uns, and1);
7719 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7720 TREE_INT_CST_HIGH (and1));
7721 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7722 TREE_CONSTANT_OVERFLOW (and1));
7723 return fold_build2 (BIT_AND_EXPR, type,
7724 fold_convert (type, and0), tem);
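/* Illustrative example: with 16-bit unsigned short us and 32-bit int,
   "(int) (us & 0x7f)" becomes "(int) us & 0x7f", folding the widening
   conversion into the BIT_AND_EXPR as described above. */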
7728 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7729 T2 being pointers to types of the same size. */
7730 if (POINTER_TYPE_P (type)
7731 && BINARY_CLASS_P (arg0)
7732 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7733 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7735 tree arg00 = TREE_OPERAND (arg0, 0);
7737 tree t1 = TREE_TYPE (arg00);
7738 tree tt0 = TREE_TYPE (t0);
7739 tree tt1 = TREE_TYPE (t1);
7740 tree s0 = TYPE_SIZE (tt0);
7741 tree s1 = TYPE_SIZE (tt1);
7743 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7744 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7745 TREE_OPERAND (arg0, 1));
7748 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7749 of the same precision, and X is of an integer type not narrower than
7750 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7751 if (INTEGRAL_TYPE_P (type)
7752 && TREE_CODE (op0) == BIT_NOT_EXPR
7753 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7754 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7755 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7756 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7758 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7759 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7760 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7761 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7764 tem = fold_convert_const (code, type, op0);
7765 return tem ? tem : NULL_TREE;
7767 case VIEW_CONVERT_EXPR:
7768 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7769 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7770 return fold_view_convert_expr (type, op0);
7773 tem = fold_negate_expr (arg0);
7775 return fold_convert (type, tem);
7779 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7780 return fold_abs_const (arg0, type);
7781 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7782 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7783 /* Convert fabs((double)float) into (double)fabsf(float). */
7784 else if (TREE_CODE (arg0) == NOP_EXPR
7785 && TREE_CODE (type) == REAL_TYPE)
7787 tree targ0 = strip_float_extensions (arg0);
7789 return fold_convert (type, fold_build1 (ABS_EXPR,
7793 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7794 else if (TREE_CODE (arg0) == ABS_EXPR)
7796 else if (tree_expr_nonnegative_p (arg0))
7799 /* Strip sign ops from argument. */
7800 if (TREE_CODE (type) == REAL_TYPE)
7802 tem = fold_strip_sign_ops (arg0);
7804 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7809 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7810 return fold_convert (type, arg0);
7811 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7813 tree itype = TREE_TYPE (type);
7814 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7815 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7816 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7818 if (TREE_CODE (arg0) == COMPLEX_CST)
7820 tree itype = TREE_TYPE (type);
7821 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7822 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7823 return build_complex (type, rpart, negate_expr (ipart));
7825 if (TREE_CODE (arg0) == CONJ_EXPR)
7826 return fold_convert (type, TREE_OPERAND (arg0, 0));
7830 if (TREE_CODE (arg0) == INTEGER_CST)
7831 return fold_not_const (arg0, type);
7832 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7833 return TREE_OPERAND (arg0, 0);
7834 /* Convert ~ (-A) to A - 1. */
7835 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7836 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7837 build_int_cst (type, 1));
7838 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7839 else if (INTEGRAL_TYPE_P (type)
7840 && ((TREE_CODE (arg0) == MINUS_EXPR
7841 && integer_onep (TREE_OPERAND (arg0, 1)))
7842 || (TREE_CODE (arg0) == PLUS_EXPR
7843 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7844 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7845 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7846 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7847 && (tem = fold_unary (BIT_NOT_EXPR, type,
7849 TREE_OPERAND (arg0, 0)))))
7850 return fold_build2 (BIT_XOR_EXPR, type, tem,
7851 fold_convert (type, TREE_OPERAND (arg0, 1)));
7852 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7853 && (tem = fold_unary (BIT_NOT_EXPR, type,
7855 TREE_OPERAND (arg0, 1)))))
7856 return fold_build2 (BIT_XOR_EXPR, type,
7857 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
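/* Illustrative instances of the BIT_NOT_EXPR folds above, for signed
   int x:

     ~(-x)     ->  x - 1
     ~(x - 1)  ->  -x
     ~(5 ^ x)  ->  ~5 ^ x    (the constant operand simplifies) */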
7861 case TRUTH_NOT_EXPR:
7862 /* The argument to invert_truthvalue must have Boolean type. */
7863 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7864 arg0 = fold_convert (boolean_type_node, arg0);
7866 /* Note that the operand of this must be an int
7867 and its values must be 0 or 1.
7868 ("true" is a fixed value perhaps depending on the language,
7869 but we don't handle values other than 1 correctly yet.) */
7870 tem = fold_truth_not_expr (arg0);
7873 return fold_convert (type, tem);
7876 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7877 return fold_convert (type, arg0);
7878 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7879 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7880 TREE_OPERAND (arg0, 1));
7881 if (TREE_CODE (arg0) == COMPLEX_CST)
7882 return fold_convert (type, TREE_REALPART (arg0));
7883 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7885 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7886 tem = fold_build2 (TREE_CODE (arg0), itype,
7887 fold_build1 (REALPART_EXPR, itype,
7888 TREE_OPERAND (arg0, 0)),
7889 fold_build1 (REALPART_EXPR, itype,
7890 TREE_OPERAND (arg0, 1)));
7891 return fold_convert (type, tem);
7893 if (TREE_CODE (arg0) == CONJ_EXPR)
7895 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7896 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7897 return fold_convert (type, tem);
7902 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7903 return fold_convert (type, integer_zero_node);
7904 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7905 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7906 TREE_OPERAND (arg0, 0));
7907 if (TREE_CODE (arg0) == COMPLEX_CST)
7908 return fold_convert (type, TREE_IMAGPART (arg0));
7909 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7911 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7912 tem = fold_build2 (TREE_CODE (arg0), itype,
7913 fold_build1 (IMAGPART_EXPR, itype,
7914 TREE_OPERAND (arg0, 0)),
7915 fold_build1 (IMAGPART_EXPR, itype,
7916 TREE_OPERAND (arg0, 1)));
7917 return fold_convert (type, tem);
7919 if (TREE_CODE (arg0) == CONJ_EXPR)
7921 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7922 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7923 return fold_convert (type, negate_expr (tem));
7929 } /* switch (code) */
7932 /* Fold a binary expression of code CODE and type TYPE with operands
7933 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7934 Return the folded expression if folding is successful. Otherwise,
7935 return NULL_TREE. */
7938 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7940 enum tree_code compl_code;
7942 if (code == MIN_EXPR)
7943 compl_code = MAX_EXPR;
7944 else if (code == MAX_EXPR)
7945 compl_code = MIN_EXPR;
7949 /* MIN (MAX (a, b), b) == b. */
7950 if (TREE_CODE (op0) == compl_code
7951 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7952 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7954 /* MIN (MAX (b, a), b) == b. */
7955 if (TREE_CODE (op0) == compl_code
7956 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7957 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7958 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7960 /* MIN (a, MAX (a, b)) == a. */
7961 if (TREE_CODE (op1) == compl_code
7962 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7963 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7964 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7966 /* MIN (a, MAX (b, a)) == a. */
7967 if (TREE_CODE (op1) == compl_code
7968 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7969 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7970 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7975 /* Subroutine of fold_binary. This routine performs all of the
7976 transformations that are common to the equality/inequality
7977 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7978 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7979 fold_binary itself should call fold_binary instead. Fold a comparison with
7980 tree code CODE and type TYPE with operands OP0 and OP1. Return
7981 the folded comparison or NULL_TREE. */
7984 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7986 tree arg0, arg1, tem;
7991 STRIP_SIGN_NOPS (arg0);
7992 STRIP_SIGN_NOPS (arg1);
7994 tem = fold_relational_const (code, type, arg0, arg1);
7995 if (tem != NULL_TREE)
7998 /* If one arg is a real or integer constant, put it last. */
7999 if (tree_swap_operands_p (arg0, arg1, true))
8000 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8002 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8003 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8004 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8005 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8006 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8007 && (TREE_CODE (arg1) == INTEGER_CST
8008 && !TREE_OVERFLOW (arg1)))
8010 tree const1 = TREE_OPERAND (arg0, 1);
8012 tree variable = TREE_OPERAND (arg0, 0);
8015 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8017 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8018 TREE_TYPE (arg1), const2, const1);
8019 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8020 && (TREE_CODE (lhs) != INTEGER_CST
8021 || !TREE_OVERFLOW (lhs)))
8023 fold_overflow_warning (("assuming signed overflow does not occur "
8024 "when changing X +- C1 cmp C2 to "
8026 WARN_STRICT_OVERFLOW_COMPARISON);
8027 return fold_build2 (code, type, variable, lhs);
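/* Illustrative example (not from the original sources): for signed
   int x, "x + 5 < 10" is rewritten as "x < 5"; because this relies on
   signed overflow being undefined, fold_overflow_warning above records
   the assumption for the strict-overflow warning machinery. */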
8031 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8032 same object, then we can fold this to a comparison of the two offsets in
8033 signed size type. This is possible because pointer arithmetic is
8034 restricted to remain within an object and overflow on pointer differences
8035 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8037 We check flag_wrapv directly because pointer types are unsigned,
8038 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8039 normally what we want, to avoid certain odd overflow cases, but not here. */
8041 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8043 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8045 tree base0, offset0, base1, offset1;
8047 if (extract_array_ref (arg0, &base0, &offset0)
8048 && extract_array_ref (arg1, &base1, &offset1)
8049 && operand_equal_p (base0, base1, 0))
8051 tree signed_size_type_node;
8052 signed_size_type_node = signed_type_for (size_type_node);
8054 /* By converting to signed size type we cover middle-end pointer
8055 arithmetic which operates on unsigned pointer types of size
8056 type size and ARRAY_REF offsets which are properly sign or
8057 zero extended from their type in case it is narrower than the size type. */
8059 if (offset0 == NULL_TREE)
8060 offset0 = build_int_cst (signed_size_type_node, 0);
8062 offset0 = fold_convert (signed_size_type_node, offset0);
8063 if (offset1 == NULL_TREE)
8064 offset1 = build_int_cst (signed_size_type_node, 0);
8066 offset1 = fold_convert (signed_size_type_node, offset1);
8068 return fold_build2 (code, type, offset0, offset1);
8072 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8074 tree targ0 = strip_float_extensions (arg0);
8075 tree targ1 = strip_float_extensions (arg1);
8076 tree newtype = TREE_TYPE (targ0);
8078 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8079 newtype = TREE_TYPE (targ1);
8081 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8082 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8083 return fold_build2 (code, type, fold_convert (newtype, targ0),
8084 fold_convert (newtype, targ1));
8086 /* (-a) CMP (-b) -> b CMP a */
8087 if (TREE_CODE (arg0) == NEGATE_EXPR
8088 && TREE_CODE (arg1) == NEGATE_EXPR)
8089 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8090 TREE_OPERAND (arg0, 0));
8092 if (TREE_CODE (arg1) == REAL_CST)
8094 REAL_VALUE_TYPE cst;
8095 cst = TREE_REAL_CST (arg1);
8097 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8098 if (TREE_CODE (arg0) == NEGATE_EXPR)
8099 return fold_build2 (swap_tree_comparison (code), type,
8100 TREE_OPERAND (arg0, 0),
8101 build_real (TREE_TYPE (arg1),
8102 REAL_VALUE_NEGATE (cst)));
8104 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8105 /* a CMP (-0) -> a CMP 0 */
8106 if (REAL_VALUE_MINUS_ZERO (cst))
8107 return fold_build2 (code, type, arg0,
8108 build_real (TREE_TYPE (arg1), dconst0));
8110 /* x != NaN is always true, other ops are always false. */
8111 if (REAL_VALUE_ISNAN (cst)
8112 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8114 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8115 return omit_one_operand (type, tem, arg0);
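/* Illustrative examples of the REAL_CST folds above: "-x > 2.0"
   becomes "x < -2.0"; "x == -0.0" becomes "x == 0.0"; and a comparison
   of x against a quiet NaN constant folds to 1 for NE_EXPR and to 0
   for every other comparison code. */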
8118 /* Fold comparisons against infinity. */
8119 if (REAL_VALUE_ISINF (cst))
8121 tem = fold_inf_compare (code, type, arg0, arg1);
8122 if (tem != NULL_TREE)
8127 /* If this is a comparison of a real constant with a PLUS_EXPR
8128 or a MINUS_EXPR of a real constant, we can convert it into a
8129 comparison with a revised real constant as long as no overflow
8130 occurs when unsafe_math_optimizations are enabled. */
8131 if (flag_unsafe_math_optimizations
8132 && TREE_CODE (arg1) == REAL_CST
8133 && (TREE_CODE (arg0) == PLUS_EXPR
8134 || TREE_CODE (arg0) == MINUS_EXPR)
8135 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8136 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8137 ? MINUS_EXPR : PLUS_EXPR,
8138 arg1, TREE_OPERAND (arg0, 1), 0))
8139 && ! TREE_CONSTANT_OVERFLOW (tem))
8140 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8142 /* Likewise, we can simplify a comparison of a real constant with
8143 a MINUS_EXPR whose first operand is also a real constant, i.e.
8144 (c1 - x) < c2 becomes x > c1-c2. */
8145 if (flag_unsafe_math_optimizations
8146 && TREE_CODE (arg1) == REAL_CST
8147 && TREE_CODE (arg0) == MINUS_EXPR
8148 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8149 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8151 && ! TREE_CONSTANT_OVERFLOW (tem))
8152 return fold_build2 (swap_tree_comparison (code), type,
8153 TREE_OPERAND (arg0, 1), tem);
8155 /* Fold comparisons against built-in math functions. */
8156 if (TREE_CODE (arg1) == REAL_CST
8157 && flag_unsafe_math_optimizations
8158 && ! flag_errno_math)
8160 enum built_in_function fcode = builtin_mathfn_code (arg0);
8162 if (fcode != END_BUILTINS)
8164 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8165 if (tem != NULL_TREE)
8171 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8172 if (TREE_CONSTANT (arg1)
8173 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8174 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8175 /* This optimization is invalid for ordered comparisons
8176 if CONST+INCR overflows or if foo+incr might overflow.
8177 This optimization is invalid for floating point due to rounding.
8178 For pointer types we assume overflow doesn't happen. */
8179 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8180 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8181 && (code == EQ_EXPR || code == NE_EXPR))))
8183 tree varop, newconst;
8185 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8187 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8188 arg1, TREE_OPERAND (arg0, 1));
8189 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8190 TREE_OPERAND (arg0, 0),
8191 TREE_OPERAND (arg0, 1));
8195 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8196 arg1, TREE_OPERAND (arg0, 1));
8197 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8198 TREE_OPERAND (arg0, 0),
8199 TREE_OPERAND (arg0, 1));
8203 /* If VAROP is a reference to a bitfield, we must mask
8204 the constant by the width of the field. */
8205 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8206 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8207 && host_integerp (DECL_SIZE (TREE_OPERAND
8208 (TREE_OPERAND (varop, 0), 1)), 1))
8210 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8211 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8212 tree folded_compare, shift;
8214 /* First check whether the comparison would come out
8215 always the same. If we don't do that we would
8216 change the meaning with the masking. */
8217 folded_compare = fold_build2 (code, type,
8218 TREE_OPERAND (varop, 0), arg1);
8219 if (TREE_CODE (folded_compare) == INTEGER_CST)
8220 return omit_one_operand (type, folded_compare, varop);
8222 shift = build_int_cst (NULL_TREE,
8223 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8224 shift = fold_convert (TREE_TYPE (varop), shift);
          newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
                                  newconst, shift);
          newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
                                  newconst, shift);
8231 return fold_build2 (code, type, varop, newconst);
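          /* Illustrative example (editorial note, not from the original
             source): for a 3-bit signed bit-field in a 32-bit word, SHIFT
             is 29, so the shift pair above sign-extends the low 3 bits of
             NEWCONST, i.e. the constant is reduced to the value range the
             field can actually hold before the comparison is rebuilt.  */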
8234 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8235 && (TREE_CODE (arg0) == NOP_EXPR
8236 || TREE_CODE (arg0) == CONVERT_EXPR))
8238 /* If we are widening one operand of an integer comparison,
8239 see if the other operand is similarly being widened. Perhaps we
8240 can do the comparison in the narrower type. */
8241 tem = fold_widened_comparison (code, type, arg0, arg1);
8245 /* Or if we are changing signedness. */
8246 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8251 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8252 constant, we can simplify it. */
8253 if (TREE_CODE (arg1) == INTEGER_CST
8254 && (TREE_CODE (arg0) == MIN_EXPR
8255 || TREE_CODE (arg0) == MAX_EXPR)
8256 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8258 tem = optimize_minmax_comparison (code, type, op0, op1);
8263 /* Simplify comparison of something with itself. (For IEEE
8264 floating-point, we can only do some of these simplifications.) */
8265 if (operand_equal_p (arg0, arg1, 0))
8270 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8271 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8272 return constant_boolean_node (1, type);
8277 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8278 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8279 return constant_boolean_node (1, type);
8280 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8283 /* For NE, we can only do this simplification if integer
8284 or we don't honor IEEE floating point NaNs. */
8285 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8286 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8288 /* ... fall through ... */
8291 return constant_boolean_node (0, type);
8297 /* If we are comparing an expression that just has comparisons
8298 of two integer values, arithmetic expressions of those comparisons,
8299 and constants, we can simplify it. There are only three cases
8300 to check: the two values can either be equal, the first can be
8301 greater, or the second can be greater. Fold the expression for
8302 those three values. Since each value must be 0 or 1, we have
8303 eight possibilities, each of which corresponds to the constant 0
8304 or 1 or one of the six possible comparisons.
8306 This handles common cases like (a > b) == 0 but also handles
8307 expressions like ((x > y) - (y > x)) > 0, which supposedly
8308 occur in macroized code. */
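     Worked example (editorial sketch, not from the original source):
     for (a > b) == 0 the three trial foldings give
       a > b  : high_result  = 0
       a == b : equal_result = 1
       a < b  : low_result   = 1
     i.e. the mask 011, which selects LE_EXPR, so the whole expression
     folds to a <= b.  */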
8310 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8312 tree cval1 = 0, cval2 = 0;
8315 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8316 /* Don't handle degenerate cases here; they should already
8317 have been handled anyway. */
8318 && cval1 != 0 && cval2 != 0
8319 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8320 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8321 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8322 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8323 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8324 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8325 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8327 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8328 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8330 /* We can't just pass T to eval_subst in case cval1 or cval2
8331 was the same as ARG1. */
          tree high_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, maxval,
                                       cval2, minval), arg1);
          tree equal_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, maxval,
                                       cval2, maxval), arg1);
          tree low_result
            = fold_build2 (code, type,
                           eval_subst (arg0, cval1, minval,
                                       cval2, maxval), arg1);
8349 /* All three of these results should be 0 or 1. Confirm they are.
8350 Then use those values to select the proper code to use. */
8352 if (TREE_CODE (high_result) == INTEGER_CST
8353 && TREE_CODE (equal_result) == INTEGER_CST
8354 && TREE_CODE (low_result) == INTEGER_CST)
8356 /* Make a 3-bit mask with the high-order bit being the
8357 value for `>', the next for '=', and the low for '<'. */
8358 switch ((integer_onep (high_result) * 4)
8359 + (integer_onep (equal_result) * 2)
8360 + integer_onep (low_result))
8364 return omit_one_operand (type, integer_zero_node, arg0);
8385 return omit_one_operand (type, integer_one_node, arg0);
8389 return save_expr (build2 (code, type, cval1, cval2));
8390 return fold_build2 (code, type, cval1, cval2);
8395 /* Fold a comparison of the address of COMPONENT_REFs with the same
8396 type and component to a comparison of the address of the base
8397 object. In short, &x->a OP &y->a to x OP y and
8398 &x->a OP &y.a to x OP &y */
8399 if (TREE_CODE (arg0) == ADDR_EXPR
8400 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8401 && TREE_CODE (arg1) == ADDR_EXPR
8402 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8404 tree cref0 = TREE_OPERAND (arg0, 0);
8405 tree cref1 = TREE_OPERAND (arg1, 0);
8406 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8408 tree op0 = TREE_OPERAND (cref0, 0);
8409 tree op1 = TREE_OPERAND (cref1, 0);
8410 return fold_build2 (code, type,
8411 build_fold_addr_expr (op0),
8412 build_fold_addr_expr (op1));
8416 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8417 into a single range test. */
8418 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8419 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8420 && TREE_CODE (arg1) == INTEGER_CST
8421 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8422 && !integer_zerop (TREE_OPERAND (arg0, 1))
8423 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8424 && !TREE_OVERFLOW (arg1))
8426 tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
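
/* Editorial sketch (not part of the original file): the X/C1 op C2
   rewrite above turns a division compare into a range test.  For
   example, for unsigned X, X / 4 == 3 holds exactly when X is in
   [12, 15].  The hypothetical helper below only illustrates that
   equivalence; it is not GCC code.  */

static int
div_compare_example (unsigned int x)
{
  int by_division = (x / 4 == 3);        /* The original form.  */
  int by_range = (x >= 12 && x <= 15);   /* The folded range test.  */
  return by_division == by_range;        /* Always 1.  */
}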
8435 /* Subroutine of fold_binary. Optimize complex multiplications of the
8436 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8437 argument EXPR represents the expression "z" of type TYPE. */
8440 fold_mult_zconjz (tree type, tree expr)
8442 tree itype = TREE_TYPE (type);
8443 tree rpart, ipart, tem;
8445 if (TREE_CODE (expr) == COMPLEX_EXPR)
8447 rpart = TREE_OPERAND (expr, 0);
8448 ipart = TREE_OPERAND (expr, 1);
8450 else if (TREE_CODE (expr) == COMPLEX_CST)
8452 rpart = TREE_REALPART (expr);
8453 ipart = TREE_IMAGPART (expr);
8457 expr = save_expr (expr);
8458 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8459 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8462 rpart = save_expr (rpart);
8463 ipart = save_expr (ipart);
8464 tem = fold_build2 (PLUS_EXPR, itype,
8465 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8466 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8467 return fold_build2 (COMPLEX_EXPR, type, tem,
8468 fold_convert (itype, integer_zero_node));
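
/* Editorial sketch (not part of the original file): fold_mult_zconjz
   above computes, for z = a + bi, z * conj(z) = a*a + b*b with a zero
   imaginary part.  The hypothetical function below spells the identity
   out for plain doubles; it is not GCC code.  */

static void
zconjz_example (double re, double im, double *res_re, double *res_im)
{
  /* (re + im*i) * (re - im*i) = re*re + im*im + 0*i.  */
  *res_re = re * re + im * im;
  *res_im = 0.0;
}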
8472 /* Fold a binary expression of code CODE and type TYPE with operands
8473 OP0 and OP1. Return the folded expression if folding is
8474 successful. Otherwise, return NULL_TREE. */
8477 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8479 enum tree_code_class kind = TREE_CODE_CLASS (code);
8480 tree arg0, arg1, tem;
8481 tree t1 = NULL_TREE;
8482 bool strict_overflow_p;
8484 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8485 && TREE_CODE_LENGTH (code) == 2
8487 && op1 != NULL_TREE);
8492 /* Strip any conversions that don't change the mode. This is
8493 safe for every expression, except for a comparison expression
8494 because its signedness is derived from its operands. So, in
     the latter case, only strip conversions that don't change the
     signedness.
8498 Note that this is done as an internal manipulation within the
8499 constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
8502 the tree that will get out of the constant folder. */
8504 if (kind == tcc_comparison)
8506 STRIP_SIGN_NOPS (arg0);
8507 STRIP_SIGN_NOPS (arg1);
8515 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8516 constant but we can't do arithmetic on them. */
8517 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8518 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8519 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8520 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8522 if (kind == tcc_binary)
8523 tem = const_binop (code, arg0, arg1, 0);
8524 else if (kind == tcc_comparison)
8525 tem = fold_relational_const (code, type, arg0, arg1);
      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert (type, tem);
          return tem;
        }
8537 /* If this is a commutative operation, and ARG0 is a constant, move it
8538 to ARG1 to reduce the number of tests below. */
8539 if (commutative_tree_code (code)
8540 && tree_swap_operands_p (arg0, arg1, true))
8541 return fold_build2 (code, type, op1, op0);
8543 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8545 First check for cases where an arithmetic operation is applied to a
8546 compound, conditional, or comparison operation. Push the arithmetic
8547 operation inside the compound or conditional to see if any folding
8548 can then be done. Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.
8552 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8553 one of the operands is a comparison and the other is a comparison, a
8554 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8555 code below would make the expression more complex. Change it to a
8556 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8557 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8559 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8560 || code == EQ_EXPR || code == NE_EXPR)
8561 && ((truth_value_p (TREE_CODE (arg0))
8562 && (truth_value_p (TREE_CODE (arg1))
8563 || (TREE_CODE (arg1) == BIT_AND_EXPR
8564 && integer_onep (TREE_OPERAND (arg1, 1)))))
8565 || (truth_value_p (TREE_CODE (arg1))
8566 && (truth_value_p (TREE_CODE (arg0))
8567 || (TREE_CODE (arg0) == BIT_AND_EXPR
8568 && integer_onep (TREE_OPERAND (arg0, 1)))))))
      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                         : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                         : TRUTH_XOR_EXPR,
                         boolean_type_node,
8574 fold_convert (boolean_type_node, arg0),
8575 fold_convert (boolean_type_node, arg1));
8577 if (code == EQ_EXPR)
8578 tem = invert_truthvalue (tem);
8580 return fold_convert (type, tem);
8583 if (TREE_CODE_CLASS (code) == tcc_binary
8584 || TREE_CODE_CLASS (code) == tcc_comparison)
8586 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8587 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8588 fold_build2 (code, type,
8589 TREE_OPERAND (arg0, 1), op1));
8590 if (TREE_CODE (arg1) == COMPOUND_EXPR
8591 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8592 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8593 fold_build2 (code, type,
8594 op0, TREE_OPERAND (arg1, 1)));
8596 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8598 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8600 /*cond_first_p=*/1);
8601 if (tem != NULL_TREE)
8605 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8607 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8609 /*cond_first_p=*/0);
8610 if (tem != NULL_TREE)
8618 /* A + (-B) -> A - B */
8619 if (TREE_CODE (arg1) == NEGATE_EXPR)
8620 return fold_build2 (MINUS_EXPR, type,
8621 fold_convert (type, arg0),
8622 fold_convert (type, TREE_OPERAND (arg1, 0)));
8623 /* (-A) + B -> B - A */
8624 if (TREE_CODE (arg0) == NEGATE_EXPR
8625 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8626 return fold_build2 (MINUS_EXPR, type,
8627 fold_convert (type, arg1),
8628 fold_convert (type, TREE_OPERAND (arg0, 0)));
8629 /* Convert ~A + 1 to -A. */
8630 if (INTEGRAL_TYPE_P (type)
8631 && TREE_CODE (arg0) == BIT_NOT_EXPR
8632 && integer_onep (arg1))
8633 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
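      /* Illustrative note (editorial, not from the original source):
         this is the two's-complement identity -A == ~A + 1; e.g. for
         32-bit int, ~5 + 1 == -5.  */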
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  */
8637 if ((TREE_CODE (arg0) == MULT_EXPR
8638 || TREE_CODE (arg1) == MULT_EXPR)
8639 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8641 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8646 if (! FLOAT_TYPE_P (type))
8648 if (integer_zerop (arg1))
8649 return non_lvalue (fold_convert (type, arg0));
8651 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8652 with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
8655 if (TREE_CODE (arg0) == BIT_AND_EXPR
8656 && TREE_CODE (arg1) == BIT_AND_EXPR
8657 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8658 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8659 && integer_zerop (const_binop (BIT_AND_EXPR,
8660 TREE_OPERAND (arg0, 1),
8661 TREE_OPERAND (arg1, 1), 0)))
8663 code = BIT_IOR_EXPR;
8667 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8668 (plus (plus (mult) (mult)) (foo)) so that we can
8669 take advantage of the factoring cases below. */
8670 if (((TREE_CODE (arg0) == PLUS_EXPR
8671 || TREE_CODE (arg0) == MINUS_EXPR)
8672 && TREE_CODE (arg1) == MULT_EXPR)
8673 || ((TREE_CODE (arg1) == PLUS_EXPR
8674 || TREE_CODE (arg1) == MINUS_EXPR)
8675 && TREE_CODE (arg0) == MULT_EXPR))
8677 tree parg0, parg1, parg, marg;
8678 enum tree_code pcode;
8680 if (TREE_CODE (arg1) == MULT_EXPR)
8681 parg = arg0, marg = arg1;
8683 parg = arg1, marg = arg0;
8684 pcode = TREE_CODE (parg);
8685 parg0 = TREE_OPERAND (parg, 0);
8686 parg1 = TREE_OPERAND (parg, 1);
8690 if (TREE_CODE (parg0) == MULT_EXPR
8691 && TREE_CODE (parg1) != MULT_EXPR)
8692 return fold_build2 (pcode, type,
8693 fold_build2 (PLUS_EXPR, type,
8694 fold_convert (type, parg0),
8695 fold_convert (type, marg)),
8696 fold_convert (type, parg1));
8697 if (TREE_CODE (parg0) != MULT_EXPR
8698 && TREE_CODE (parg1) == MULT_EXPR)
8699 return fold_build2 (PLUS_EXPR, type,
8700 fold_convert (type, parg0),
8701 fold_build2 (pcode, type,
8702 fold_convert (type, marg),
8707 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
         of the array.  Loop optimizers sometimes produce this type of
         expression.  */
8710 if (TREE_CODE (arg0) == ADDR_EXPR)
8712 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8714 return fold_convert (type, tem);
8716 else if (TREE_CODE (arg1) == ADDR_EXPR)
8718 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8720 return fold_convert (type, tem);
8725 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8726 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8727 return non_lvalue (fold_convert (type, arg0));
8729 /* Likewise if the operands are reversed. */
8730 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8731 return non_lvalue (fold_convert (type, arg1));
8733 /* Convert X + -C into X - C. */
8734 if (TREE_CODE (arg1) == REAL_CST
8735 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8737 tem = fold_negate_const (arg1, type);
8738 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8739 return fold_build2 (MINUS_EXPR, type,
8740 fold_convert (type, arg0),
8741 fold_convert (type, tem));
8744 if (flag_unsafe_math_optimizations
8745 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8746 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8747 && (tem = distribute_real_division (code, type, arg0, arg1)))
8750 /* Convert x+x into x*2.0. */
8751 if (operand_equal_p (arg0, arg1, 0)
8752 && SCALAR_FLOAT_TYPE_P (type))
8753 return fold_build2 (MULT_EXPR, type, arg0,
8754 build_real (type, dconst2));
8756 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8757 if (flag_unsafe_math_optimizations
8758 && TREE_CODE (arg1) == PLUS_EXPR
8759 && TREE_CODE (arg0) != MULT_EXPR)
8761 tree tree10 = TREE_OPERAND (arg1, 0);
8762 tree tree11 = TREE_OPERAND (arg1, 1);
8763 if (TREE_CODE (tree11) == MULT_EXPR
8764 && TREE_CODE (tree10) == MULT_EXPR)
8767 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8768 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8771 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8772 if (flag_unsafe_math_optimizations
8773 && TREE_CODE (arg0) == PLUS_EXPR
8774 && TREE_CODE (arg1) != MULT_EXPR)
8776 tree tree00 = TREE_OPERAND (arg0, 0);
8777 tree tree01 = TREE_OPERAND (arg0, 1);
8778 if (TREE_CODE (tree01) == MULT_EXPR
8779 && TREE_CODE (tree00) == MULT_EXPR)
8782 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8783 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8789 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8790 is a rotate of A by C1 bits. */
8791 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8792 is a rotate of A by B bits. */
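      /* Illustrative example (editorial, not from the original source):
         for 32-bit unsigned A, (A << 8) + (A >> 24) combines into a
         left-rotate of A by 8 bits, since 8 + 24 equals the precision
         of A and the shifted halves cannot overlap.  */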
8794 enum tree_code code0, code1;
8795 code0 = TREE_CODE (arg0);
8796 code1 = TREE_CODE (arg1);
8797 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8798 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8799 && operand_equal_p (TREE_OPERAND (arg0, 0),
8800 TREE_OPERAND (arg1, 0), 0)
8801 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8803 tree tree01, tree11;
8804 enum tree_code code01, code11;
8806 tree01 = TREE_OPERAND (arg0, 1);
8807 tree11 = TREE_OPERAND (arg1, 1);
8808 STRIP_NOPS (tree01);
8809 STRIP_NOPS (tree11);
8810 code01 = TREE_CODE (tree01);
8811 code11 = TREE_CODE (tree11);
8812 if (code01 == INTEGER_CST
8813 && code11 == INTEGER_CST
8814 && TREE_INT_CST_HIGH (tree01) == 0
8815 && TREE_INT_CST_HIGH (tree11) == 0
8816 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8817 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8818 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8819 code0 == LSHIFT_EXPR ? tree01 : tree11);
8820 else if (code11 == MINUS_EXPR)
8822 tree tree110, tree111;
8823 tree110 = TREE_OPERAND (tree11, 0);
8824 tree111 = TREE_OPERAND (tree11, 1);
8825 STRIP_NOPS (tree110);
8826 STRIP_NOPS (tree111);
8827 if (TREE_CODE (tree110) == INTEGER_CST
8828 && 0 == compare_tree_int (tree110,
8830 (TREE_TYPE (TREE_OPERAND
8832 && operand_equal_p (tree01, tree111, 0))
8833 return build2 ((code0 == LSHIFT_EXPR
8836 type, TREE_OPERAND (arg0, 0), tree01);
8838 else if (code01 == MINUS_EXPR)
8840 tree tree010, tree011;
8841 tree010 = TREE_OPERAND (tree01, 0);
8842 tree011 = TREE_OPERAND (tree01, 1);
8843 STRIP_NOPS (tree010);
8844 STRIP_NOPS (tree011);
8845 if (TREE_CODE (tree010) == INTEGER_CST
8846 && 0 == compare_tree_int (tree010,
8848 (TREE_TYPE (TREE_OPERAND
8850 && operand_equal_p (tree11, tree011, 0))
8851 return build2 ((code0 != LSHIFT_EXPR
8854 type, TREE_OPERAND (arg0, 0), tree11);
8860 /* In most languages, can't associate operations on floats through
8861 parentheses. Rather than remember where the parentheses were, we
8862 don't associate floats at all, unless the user has specified
8863 -funsafe-math-optimizations. */
8865 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8867 tree var0, con0, lit0, minus_lit0;
8868 tree var1, con1, lit1, minus_lit1;
8871 /* Split both trees into variables, constants, and literals. Then
8872 associate each group together, the constants with literals,
8873 then the result with variables. This increases the chances of
8874 literals being recombined later and of generating relocatable
8875 expressions for the sum of a constant and literal. */
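          /* Worked example (editorial, not from the original source):
             (x + 3) + (y + 5) splits into variables {x, y} and literals
             {3, 5}; the literals associate to 8, giving (x + y) + 8.  */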
8876 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8877 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8878 code == MINUS_EXPR);
8880 /* With undefined overflow we can only associate constants
8881 with one variable. */
          if ((POINTER_TYPE_P (type)
               || (INTEGRAL_TYPE_P (type)
                   && !(TYPE_UNSIGNED (type) || flag_wrapv)))
              && var0 && var1)
            {
              tree tmp0 = var0, tmp1 = var1;
8890 if (TREE_CODE (tmp0) == NEGATE_EXPR)
8891 tmp0 = TREE_OPERAND (tmp0, 0);
8892 if (TREE_CODE (tmp1) == NEGATE_EXPR)
8893 tmp1 = TREE_OPERAND (tmp1, 0);
8894 /* The only case we can still associate with two variables
8895 is if they are the same, modulo negation. */
8896 if (!operand_equal_p (tmp0, tmp1, 0))
8900 /* Only do something if we found more than two objects. Otherwise,
8901 nothing has changed and we risk infinite recursion. */
8903 && (2 < ((var0 != 0) + (var1 != 0)
8904 + (con0 != 0) + (con1 != 0)
8905 + (lit0 != 0) + (lit1 != 0)
8906 + (minus_lit0 != 0) + (minus_lit1 != 0))))
8908 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8909 if (code == MINUS_EXPR)
8912 var0 = associate_trees (var0, var1, code, type);
8913 con0 = associate_trees (con0, con1, code, type);
8914 lit0 = associate_trees (lit0, lit1, code, type);
8915 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8917 /* Preserve the MINUS_EXPR if the negative part of the literal is
8918 greater than the positive part. Otherwise, the multiplicative
             folding code (i.e. extract_muldiv) may be fooled in case
8920 unsigned constants are subtracted, like in the following
8921 example: ((X*2 + 4) - 8U)/2. */
8922 if (minus_lit0 && lit0)
8924 if (TREE_CODE (lit0) == INTEGER_CST
8925 && TREE_CODE (minus_lit0) == INTEGER_CST
8926 && tree_int_cst_lt (lit0, minus_lit0))
8928 minus_lit0 = associate_trees (minus_lit0, lit0,
8934 lit0 = associate_trees (lit0, minus_lit0,
8942 return fold_convert (type,
8943 associate_trees (var0, minus_lit0,
8947 con0 = associate_trees (con0, minus_lit0,
8949 return fold_convert (type,
8950 associate_trees (var0, con0,
8955 con0 = associate_trees (con0, lit0, code, type);
8956 return fold_convert (type, associate_trees (var0, con0,
8964 /* A - (-B) -> A + B */
8965 if (TREE_CODE (arg1) == NEGATE_EXPR)
8966 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8967 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8968 if (TREE_CODE (arg0) == NEGATE_EXPR
8969 && (FLOAT_TYPE_P (type)
8970 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8971 && negate_expr_p (arg1)
8972 && reorder_operands_p (arg0, arg1))
8973 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8974 TREE_OPERAND (arg0, 0));
8975 /* Convert -A - 1 to ~A. */
8976 if (INTEGRAL_TYPE_P (type)
8977 && TREE_CODE (arg0) == NEGATE_EXPR
8978 && integer_onep (arg1))
8979 return fold_build1 (BIT_NOT_EXPR, type,
8980 fold_convert (type, TREE_OPERAND (arg0, 0)));
8982 /* Convert -1 - A to ~A. */
8983 if (INTEGRAL_TYPE_P (type)
8984 && integer_all_onesp (arg0))
8985 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8987 if (! FLOAT_TYPE_P (type))
8989 if (integer_zerop (arg0))
8990 return negate_expr (fold_convert (type, arg1));
8991 if (integer_zerop (arg1))
8992 return non_lvalue (fold_convert (type, arg0));
8994 /* Fold A - (A & B) into ~B & A. */
8995 if (!TREE_SIDE_EFFECTS (arg0)
8996 && TREE_CODE (arg1) == BIT_AND_EXPR)
8998 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8999 return fold_build2 (BIT_AND_EXPR, type,
9000 fold_build1 (BIT_NOT_EXPR, type,
9001 TREE_OPERAND (arg1, 0)),
9003 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9004 return fold_build2 (BIT_AND_EXPR, type,
9005 fold_build1 (BIT_NOT_EXPR, type,
9006 TREE_OPERAND (arg1, 1)),
9010 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9011 any power of 2 minus 1. */
9012 if (TREE_CODE (arg0) == BIT_AND_EXPR
9013 && TREE_CODE (arg1) == BIT_AND_EXPR
9014 && operand_equal_p (TREE_OPERAND (arg0, 0),
9015 TREE_OPERAND (arg1, 0), 0))
9017 tree mask0 = TREE_OPERAND (arg0, 1);
9018 tree mask1 = TREE_OPERAND (arg1, 1);
9019 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9021 if (operand_equal_p (tem, mask1, 0))
9023 tem = fold_build2 (BIT_XOR_EXPR, type,
9024 TREE_OPERAND (arg0, 0), mask1);
9025 return fold_build2 (MINUS_EXPR, type, tem, mask1);
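          /* Illustrative check (editorial, not from the original source):
             with A = 0b1011 and B = 0b0011 (a power of 2 minus 1),
             (A & ~B) - (A & B) = 8 - 3 = 5 and (A ^ B) - B = 8 - 3 = 5,
             so the two forms agree.  */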
9030 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9031 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9032 return non_lvalue (fold_convert (type, arg0));
9034 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9035 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9036 (-ARG1 + ARG0) reduces to -ARG1. */
9037 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9038 return negate_expr (fold_convert (type, arg1));
9040 /* Fold &x - &x. This can happen from &x.foo - &x.
9041 This is unsafe for certain floats even in non-IEEE formats.
9042 In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */
9046 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9047 && operand_equal_p (arg0, arg1, 0))
9048 return fold_convert (type, integer_zero_node);
9050 /* A - B -> A + (-B) if B is easily negatable. */
9051 if (negate_expr_p (arg1)
9052 && ((FLOAT_TYPE_P (type)
9053 /* Avoid this transformation if B is a positive REAL_CST. */
9054 && (TREE_CODE (arg1) != REAL_CST
9055 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9056 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
9057 return fold_build2 (PLUS_EXPR, type,
9058 fold_convert (type, arg0),
9059 fold_convert (type, negate_expr (arg1)));
9061 /* Try folding difference of addresses. */
9065 if ((TREE_CODE (arg0) == ADDR_EXPR
9066 || TREE_CODE (arg1) == ADDR_EXPR)
9067 && ptr_difference_const (arg0, arg1, &diff))
9068 return build_int_cst_type (type, diff);
9071 /* Fold &a[i] - &a[j] to i-j. */
9072 if (TREE_CODE (arg0) == ADDR_EXPR
9073 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9074 && TREE_CODE (arg1) == ADDR_EXPR
9075 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9077 tree aref0 = TREE_OPERAND (arg0, 0);
9078 tree aref1 = TREE_OPERAND (arg1, 0);
9079 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9080 TREE_OPERAND (aref1, 0), 0))
9082 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9083 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9084 tree esz = array_ref_element_size (aref0);
9085 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9086 return fold_build2 (MULT_EXPR, type, diff,
9087 fold_convert (type, esz));
9092 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
         of the array.  Loop optimizers sometimes produce this type of
         expression.  */
9095 if (TREE_CODE (arg0) == ADDR_EXPR)
9097 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9099 return fold_convert (type, tem);
9102 if (flag_unsafe_math_optimizations
9103 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9104 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9105 && (tem = distribute_real_division (code, type, arg0, arg1)))
      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  */
9110 if ((TREE_CODE (arg0) == MULT_EXPR
9111 || TREE_CODE (arg1) == MULT_EXPR)
9112 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9114 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9122 /* (-A) * (-B) -> A * B */
9123 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9124 return fold_build2 (MULT_EXPR, type,
9125 fold_convert (type, TREE_OPERAND (arg0, 0)),
9126 fold_convert (type, negate_expr (arg1)));
9127 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9128 return fold_build2 (MULT_EXPR, type,
9129 fold_convert (type, negate_expr (arg0)),
9130 fold_convert (type, TREE_OPERAND (arg1, 0)));
9132 if (! FLOAT_TYPE_P (type))
9134 if (integer_zerop (arg1))
9135 return omit_one_operand (type, arg1, arg0);
9136 if (integer_onep (arg1))
9137 return non_lvalue (fold_convert (type, arg0));
9138 /* Transform x * -1 into -x. */
9139 if (integer_all_onesp (arg1))
9140 return fold_convert (type, negate_expr (arg0));
9142 /* (a * (1 << b)) is (a << b) */
9143 if (TREE_CODE (arg1) == LSHIFT_EXPR
9144 && integer_onep (TREE_OPERAND (arg1, 0)))
9145 return fold_build2 (LSHIFT_EXPR, type, arg0,
9146 TREE_OPERAND (arg1, 1));
9147 if (TREE_CODE (arg0) == LSHIFT_EXPR
9148 && integer_onep (TREE_OPERAND (arg0, 0)))
9149 return fold_build2 (LSHIFT_EXPR, type, arg1,
9150 TREE_OPERAND (arg0, 1));
9152 strict_overflow_p = false;
9153 if (TREE_CODE (arg1) == INTEGER_CST
9154 && 0 != (tem = extract_muldiv (op0,
                                         fold_convert (type, arg1),
                                         code, NULL_TREE,
9157 &strict_overflow_p)))
9159 if (strict_overflow_p)
9160 fold_overflow_warning (("assuming signed overflow does not "
9161 "occur when simplifying "
9163 WARN_STRICT_OVERFLOW_MISC);
9164 return fold_convert (type, tem);
9167 /* Optimize z * conj(z) for integer complex numbers. */
9168 if (TREE_CODE (arg0) == CONJ_EXPR
9169 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9170 return fold_mult_zconjz (type, arg1);
9171 if (TREE_CODE (arg1) == CONJ_EXPR
9172 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9173 return fold_mult_zconjz (type, arg0);
9177 /* Maybe fold x * 0 to 0. The expressions aren't the same
9178 when x is NaN, since x * 0 is also NaN. Nor are they the
9179 same in modes with signed zeros, since multiplying a
9180 negative value by 0 gives -0, not +0. */
9181 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9182 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9183 && real_zerop (arg1))
9184 return omit_one_operand (type, arg1, arg0);
9185 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9186 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9187 && real_onep (arg1))
9188 return non_lvalue (fold_convert (type, arg0));
9190 /* Transform x * -1.0 into -x. */
9191 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9192 && real_minus_onep (arg1))
9193 return fold_convert (type, negate_expr (arg0));
9195 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9196 if (flag_unsafe_math_optimizations
9197 && TREE_CODE (arg0) == RDIV_EXPR
9198 && TREE_CODE (arg1) == REAL_CST
9199 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9201 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9204 return fold_build2 (RDIV_EXPR, type, tem,
9205 TREE_OPERAND (arg0, 1));
9208 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9209 if (operand_equal_p (arg0, arg1, 0))
9211 tree tem = fold_strip_sign_ops (arg0);
9212 if (tem != NULL_TREE)
9214 tem = fold_convert (type, tem);
9215 return fold_build2 (MULT_EXPR, type, tem, tem);
9219 /* Optimize z * conj(z) for floating point complex numbers.
9220 Guarded by flag_unsafe_math_optimizations as non-finite
9221 imaginary components don't produce scalar results. */
9222 if (flag_unsafe_math_optimizations
9223 && TREE_CODE (arg0) == CONJ_EXPR
9224 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9225 return fold_mult_zconjz (type, arg1);
9226 if (flag_unsafe_math_optimizations
9227 && TREE_CODE (arg1) == CONJ_EXPR
9228 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9229 return fold_mult_zconjz (type, arg0);
9231 if (flag_unsafe_math_optimizations)
9233 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9234 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9236 /* Optimizations of root(...)*root(...). */
9237 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9239 tree rootfn, arg, arglist;
9240 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9241 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9243 /* Optimize sqrt(x)*sqrt(x) as x. */
9244 if (BUILTIN_SQRT_P (fcode0)
9245 && operand_equal_p (arg00, arg10, 0)
9246 && ! HONOR_SNANS (TYPE_MODE (type)))
9249 /* Optimize root(x)*root(y) as root(x*y). */
9250 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9251 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9252 arglist = build_tree_list (NULL_TREE, arg);
9253 return build_function_call_expr (rootfn, arglist);
9256 /* Optimize expN(x)*expN(y) as expN(x+y). */
9257 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9259 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9260 tree arg = fold_build2 (PLUS_EXPR, type,
9261 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9262 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9263 tree arglist = build_tree_list (NULL_TREE, arg);
9264 return build_function_call_expr (expfn, arglist);
9267 /* Optimizations of pow(...)*pow(...). */
9268 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9269 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9270 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9272 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9273 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9275 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9276 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9279 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9280 if (operand_equal_p (arg01, arg11, 0))
9282 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9283 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9284 tree arglist = tree_cons (NULL_TREE, arg,
9285 build_tree_list (NULL_TREE,
9287 return build_function_call_expr (powfn, arglist);
9290 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9291 if (operand_equal_p (arg00, arg10, 0))
9293 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9294 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9295 tree arglist = tree_cons (NULL_TREE, arg00,
9296 build_tree_list (NULL_TREE,
9298 return build_function_call_expr (powfn, arglist);
9302 /* Optimize tan(x)*cos(x) as sin(x). */
9303 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9304 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9305 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9306 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9307 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9308 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9309 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9310 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9312 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9314 if (sinfn != NULL_TREE)
9315 return build_function_call_expr (sinfn,
9316 TREE_OPERAND (arg0, 1));
9319 /* Optimize x*pow(x,c) as pow(x,c+1). */
9320 if (fcode1 == BUILT_IN_POW
9321 || fcode1 == BUILT_IN_POWF
9322 || fcode1 == BUILT_IN_POWL)
9324 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9325 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9327 if (TREE_CODE (arg11) == REAL_CST
9328 && ! TREE_CONSTANT_OVERFLOW (arg11)
9329 && operand_equal_p (arg0, arg10, 0))
9331 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9335 c = TREE_REAL_CST (arg11);
9336 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9337 arg = build_real (type, c);
9338 arglist = build_tree_list (NULL_TREE, arg);
9339 arglist = tree_cons (NULL_TREE, arg0, arglist);
9340 return build_function_call_expr (powfn, arglist);
9344 /* Optimize pow(x,c)*x as pow(x,c+1). */
9345 if (fcode0 == BUILT_IN_POW
9346 || fcode0 == BUILT_IN_POWF
9347 || fcode0 == BUILT_IN_POWL)
9349 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9350 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9352 if (TREE_CODE (arg01) == REAL_CST
9353 && ! TREE_CONSTANT_OVERFLOW (arg01)
9354 && operand_equal_p (arg1, arg00, 0))
9356 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9360 c = TREE_REAL_CST (arg01);
9361 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9362 arg = build_real (type, c);
9363 arglist = build_tree_list (NULL_TREE, arg);
9364 arglist = tree_cons (NULL_TREE, arg1, arglist);
9365 return build_function_call_expr (powfn, arglist);
9369 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9371 && operand_equal_p (arg0, arg1, 0))
9373 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9377 tree arg = build_real (type, dconst2);
9378 tree arglist = build_tree_list (NULL_TREE, arg);
9379 arglist = tree_cons (NULL_TREE, arg0, arglist);
9380 return build_function_call_expr (powfn, arglist);
9389 if (integer_all_onesp (arg1))
9390 return omit_one_operand (type, arg1, arg0);
9391 if (integer_zerop (arg1))
9392 return non_lvalue (fold_convert (type, arg0));
9393 if (operand_equal_p (arg0, arg1, 0))
9394 return non_lvalue (fold_convert (type, arg0));
9397 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9398 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9399 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9401 t1 = build_int_cst (type, -1);
9402 t1 = force_fit_type (t1, 0, false, false);
9403 return omit_one_operand (type, t1, arg1);
9407 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9408 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9409 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9411 t1 = build_int_cst (type, -1);
9412 t1 = force_fit_type (t1, 0, false, false);
9413 return omit_one_operand (type, t1, arg0);
9416 /* Canonicalize (X & C1) | C2. */
9417 if (TREE_CODE (arg0) == BIT_AND_EXPR
9418 && TREE_CODE (arg1) == INTEGER_CST
9419 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9421 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9422 int width = TYPE_PRECISION (type);
9423 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9424 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9425 hi2 = TREE_INT_CST_HIGH (arg1);
9426 lo2 = TREE_INT_CST_LOW (arg1);
9428 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9429 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9430 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9432 if (width > HOST_BITS_PER_WIDE_INT)
9434 mhi = (unsigned HOST_WIDE_INT) -1
9435 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9441 mlo = (unsigned HOST_WIDE_INT) -1
9442 >> (HOST_BITS_PER_WIDE_INT - width);
9445 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9446 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9447 return fold_build2 (BIT_IOR_EXPR, type,
9448 TREE_OPERAND (arg0, 0), arg1);
9450 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
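          /* Illustrative example (editorial, not from the original source):
             (X & 0xff) | 0x0f canonicalizes to (X & 0xf0) | 0x0f, since
             the bits of C2 are forced on regardless of C1.  */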
9453 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9454 return fold_build2 (BIT_IOR_EXPR, type,
9455 fold_build2 (BIT_AND_EXPR, type,
9456 TREE_OPERAND (arg0, 0),
9457 build_int_cst_wide (type,
9463 /* (X & Y) | Y is (X, Y). */
9464 if (TREE_CODE (arg0) == BIT_AND_EXPR
9465 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9466 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9467 /* (X & Y) | X is (Y, X). */
9468 if (TREE_CODE (arg0) == BIT_AND_EXPR
9469 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9470 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9471 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9472 /* X | (X & Y) is (Y, X). */
9473 if (TREE_CODE (arg1) == BIT_AND_EXPR
9474 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9475 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9476 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9477 /* X | (Y & X) is (Y, X). */
9478 if (TREE_CODE (arg1) == BIT_AND_EXPR
9479 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9480 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9481 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9483 t1 = distribute_bit_expr (code, type, arg0, arg1);
9484 if (t1 != NULL_TREE)
9487 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9489 This results in more efficient code for machines without a NAND
9490 instruction. Combine will canonicalize to the first form
9491 which will allow use of NAND instructions provided by the
9492 backend if they exist. */
9493 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9494 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9496 return fold_build1 (BIT_NOT_EXPR, type,
9497 build2 (BIT_AND_EXPR, type,
9498 TREE_OPERAND (arg0, 0),
9499 TREE_OPERAND (arg1, 0)));
9502 /* See if this can be simplified into a rotate first. If that
9503 is unsuccessful continue in the association code. */
9507 if (integer_zerop (arg1))
9508 return non_lvalue (fold_convert (type, arg0));
9509 if (integer_all_onesp (arg1))
9510 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9511 if (operand_equal_p (arg0, arg1, 0))
9512 return omit_one_operand (type, integer_zero_node, arg0);
9515 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9516 && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9517 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9519 t1 = build_int_cst (type, -1);
9520 t1 = force_fit_type (t1, 0, false, false);
9521 return omit_one_operand (type, t1, arg1);
9525 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9526 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9527 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9529 t1 = build_int_cst (type, -1);
9530 t1 = force_fit_type (t1, 0, false, false);
9531 return omit_one_operand (type, t1, arg0);
9534 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9535 with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
9538 if (TREE_CODE (arg0) == BIT_AND_EXPR
9539 && TREE_CODE (arg1) == BIT_AND_EXPR
9540 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9541 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9542 && integer_zerop (const_binop (BIT_AND_EXPR,
9543 TREE_OPERAND (arg0, 1),
9544 TREE_OPERAND (arg1, 1), 0)))
9546 code = BIT_IOR_EXPR;
      /* (X | Y) ^ X -> Y & ~X.  */
9551 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9552 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9554 tree t2 = TREE_OPERAND (arg0, 1);
9555 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9557 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9558 fold_convert (type, t1));
      /* (Y | X) ^ X -> Y & ~X.  */
9563 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9564 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9566 tree t2 = TREE_OPERAND (arg0, 0);
9567 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9569 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9570 fold_convert (type, t1));
      /* X ^ (X | Y) -> Y & ~X.  */
9575 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9576 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9578 tree t2 = TREE_OPERAND (arg1, 1);
9579 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9581 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9582 fold_convert (type, t1));
      /* X ^ (Y | X) -> Y & ~X.  */
9587 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9588 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9590 tree t2 = TREE_OPERAND (arg1, 0);
9591 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9593 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9594 fold_convert (type, t1));
9598 /* Convert ~X ^ ~Y to X ^ Y. */
9599 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9600 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9601 return fold_build2 (code, type,
9602 fold_convert (type, TREE_OPERAND (arg0, 0)),
9603 fold_convert (type, TREE_OPERAND (arg1, 0)));
9605 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9606 if (TREE_CODE (arg0) == BIT_AND_EXPR
9607 && integer_onep (TREE_OPERAND (arg0, 1))
9608 && integer_onep (arg1))
9609 return fold_build2 (EQ_EXPR, type, arg0,
9610 build_int_cst (TREE_TYPE (arg0), 0));
9612 /* Fold (X & Y) ^ Y as ~X & Y. */
9613 if (TREE_CODE (arg0) == BIT_AND_EXPR
9614 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9616 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9617 return fold_build2 (BIT_AND_EXPR, type,
9618 fold_build1 (BIT_NOT_EXPR, type, tem),
9619 fold_convert (type, arg1));
9621 /* Fold (X & Y) ^ X as ~Y & X. */
9622 if (TREE_CODE (arg0) == BIT_AND_EXPR
9623 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9624 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9626 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9627 return fold_build2 (BIT_AND_EXPR, type,
9628 fold_build1 (BIT_NOT_EXPR, type, tem),
9629 fold_convert (type, arg1));
9631 /* Fold X ^ (X & Y) as X & ~Y. */
9632 if (TREE_CODE (arg1) == BIT_AND_EXPR
9633 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9635 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9636 return fold_build2 (BIT_AND_EXPR, type,
9637 fold_convert (type, arg0),
9638 fold_build1 (BIT_NOT_EXPR, type, tem));
9640 /* Fold X ^ (Y & X) as ~Y & X. */
9641 if (TREE_CODE (arg1) == BIT_AND_EXPR
9642 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9643 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9645 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9646 return fold_build2 (BIT_AND_EXPR, type,
9647 fold_build1 (BIT_NOT_EXPR, type, tem),
9648 fold_convert (type, arg0));
9651 /* See if this can be simplified into a rotate first. If that
9652 is unsuccessful continue in the association code. */
9656 if (integer_all_onesp (arg1))
9657 return non_lvalue (fold_convert (type, arg0));
9658 if (integer_zerop (arg1))
9659 return omit_one_operand (type, arg1, arg0);
9660 if (operand_equal_p (arg0, arg1, 0))
9661 return non_lvalue (fold_convert (type, arg0));
9663 /* ~X & X is always zero. */
9664 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9665 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9666 return omit_one_operand (type, integer_zero_node, arg1);
9668 /* X & ~X is always zero. */
9669 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9670 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9671 return omit_one_operand (type, integer_zero_node, arg0);
9673 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9674 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9675 && TREE_CODE (arg1) == INTEGER_CST
9676 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9677 return fold_build2 (BIT_IOR_EXPR, type,
9678 fold_build2 (BIT_AND_EXPR, type,
9679 TREE_OPERAND (arg0, 0), arg1),
9680 fold_build2 (BIT_AND_EXPR, type,
9681 TREE_OPERAND (arg0, 1), arg1));
9683 /* (X | Y) & Y is (X, Y). */
9684 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9685 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9686 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9687 /* (X | Y) & X is (Y, X). */
9688 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9689 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9690 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9691 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9692 /* X & (X | Y) is (Y, X). */
9693 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9694 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9695 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9696 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9697 /* X & (Y | X) is (Y, X). */
9698 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9699 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9700 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9701 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9703 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9704 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9705 && integer_onep (TREE_OPERAND (arg0, 1))
9706 && integer_onep (arg1))
9708 tem = TREE_OPERAND (arg0, 0);
9709 return fold_build2 (EQ_EXPR, type,
9710 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9711 build_int_cst (TREE_TYPE (tem), 1)),
9712 build_int_cst (TREE_TYPE (tem), 0));
9714 /* Fold ~X & 1 as (X & 1) == 0. */
9715 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9716 && integer_onep (arg1))
9718 tem = TREE_OPERAND (arg0, 0);
9719 return fold_build2 (EQ_EXPR, type,
9720 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9721 build_int_cst (TREE_TYPE (tem), 1)),
9722 build_int_cst (TREE_TYPE (tem), 0));
9725 /* Fold (X ^ Y) & Y as ~X & Y. */
9726 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9727 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9729 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9730 return fold_build2 (BIT_AND_EXPR, type,
9731 fold_build1 (BIT_NOT_EXPR, type, tem),
9732 fold_convert (type, arg1));
9734 /* Fold (X ^ Y) & X as ~Y & X. */
9735 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9736 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9737 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9739 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9740 return fold_build2 (BIT_AND_EXPR, type,
9741 fold_build1 (BIT_NOT_EXPR, type, tem),
9742 fold_convert (type, arg1));
9744 /* Fold X & (X ^ Y) as X & ~Y. */
9745 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9746 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9748 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9749 return fold_build2 (BIT_AND_EXPR, type,
9750 fold_convert (type, arg0),
9751 fold_build1 (BIT_NOT_EXPR, type, tem));
9753 /* Fold X & (Y ^ X) as ~Y & X. */
9754 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9755 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9756 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9758 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9759 return fold_build2 (BIT_AND_EXPR, type,
9760 fold_build1 (BIT_NOT_EXPR, type, tem),
9761 fold_convert (type, arg0));
9764 t1 = distribute_bit_expr (code, type, arg0, arg1);
9765 if (t1 != NULL_TREE)
9767 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9768 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9769 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9772 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9774 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9775 && (~TREE_INT_CST_LOW (arg1)
9776 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9777 return fold_convert (type, TREE_OPERAND (arg0, 0));
9780 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9782 This results in more efficient code for machines without a NOR
9783 instruction. Combine will canonicalize to the first form
9784 which will allow use of NOR instructions provided by the
9785 backend if they exist. */
9786 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9787 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9789 return fold_build1 (BIT_NOT_EXPR, type,
9790 build2 (BIT_IOR_EXPR, type,
9791 TREE_OPERAND (arg0, 0),
9792 TREE_OPERAND (arg1, 0)));
9798 /* Don't touch a floating-point divide by zero unless the mode
9799 of the constant can represent infinity. */
9800 if (TREE_CODE (arg1) == REAL_CST
9801 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9802 && real_zerop (arg1))
9805 /* Optimize A / A to 1.0 if we don't care about
9806 NaNs or Infinities. Skip the transformation
9807 for non-real operands. */
9808 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9809 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9810 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9811 && operand_equal_p (arg0, arg1, 0))
9813 tree r = build_real (TREE_TYPE (arg0), dconst1);
9815 return omit_two_operands (type, r, arg0, arg1);
9818 /* The complex version of the above A / A optimization. */
9819 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9820 && operand_equal_p (arg0, arg1, 0))
9822 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9823 if (! HONOR_NANS (TYPE_MODE (elem_type))
9824 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9826 tree r = build_real (elem_type, dconst1);
9827 /* omit_two_operands will call fold_convert for us. */
9828 return omit_two_operands (type, r, arg0, arg1);
9832 /* (-A) / (-B) -> A / B */
9833 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9834 return fold_build2 (RDIV_EXPR, type,
9835 TREE_OPERAND (arg0, 0),
9836 negate_expr (arg1));
9837 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9838 return fold_build2 (RDIV_EXPR, type,
9840 TREE_OPERAND (arg1, 0));
9842 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9843 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9844 && real_onep (arg1))
9845 return non_lvalue (fold_convert (type, arg0));
9847 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9848 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9849 && real_minus_onep (arg1))
9850 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9852 /* If ARG1 is a constant, we can convert this to a multiply by the
9853 reciprocal. This does not have the same rounding properties,
9854 so only do this if -funsafe-math-optimizations. We can actually
9855 always safely do it if ARG1 is a power of two, but it's hard to
9856 tell if it is or not in a portable manner. */
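          /* Illustrative note (editorial, not from the original source):
             x / 2.0 -> x * 0.5 is exact because 0.5 is a power of two,
             whereas x / 3.0 -> x * (1.0/3.0) changes rounding and is
             only done under -funsafe-math-optimizations.  */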
9857 if (TREE_CODE (arg1) == REAL_CST)
9859 if (flag_unsafe_math_optimizations
9860 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9862 return fold_build2 (MULT_EXPR, type, arg0, tem);
9863 /* Find the reciprocal if optimizing and the result is exact. */
9867 r = TREE_REAL_CST (arg1);
9868 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9870 tem = build_real (type, r);
9871 return fold_build2 (MULT_EXPR, type,
9872 fold_convert (type, arg0), tem);
9876 /* Convert A/B/C to A/(B*C). */
9877 if (flag_unsafe_math_optimizations
9878 && TREE_CODE (arg0) == RDIV_EXPR)
9879 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9880 fold_build2 (MULT_EXPR, type,
9881 TREE_OPERAND (arg0, 1), arg1));
9883 /* Convert A/(B/C) to (A/B)*C. */
9884 if (flag_unsafe_math_optimizations
9885 && TREE_CODE (arg1) == RDIV_EXPR)
9886 return fold_build2 (MULT_EXPR, type,
9887 fold_build2 (RDIV_EXPR, type, arg0,
9888 TREE_OPERAND (arg1, 0)),
9889 TREE_OPERAND (arg1, 1));
9891 /* Convert C1/(X*C2) into (C1/C2)/X. */
9892 if (flag_unsafe_math_optimizations
9893 && TREE_CODE (arg1) == MULT_EXPR
9894 && TREE_CODE (arg0) == REAL_CST
9895 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9897 tree tem = const_binop (RDIV_EXPR, arg0,
9898 TREE_OPERAND (arg1, 1), 0);
9900 return fold_build2 (RDIV_EXPR, type, tem,
9901 TREE_OPERAND (arg1, 0));
9904 if (flag_unsafe_math_optimizations)
9906 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9907 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9909 /* Optimize sin(x)/cos(x) as tan(x). */
9910 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9911 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9912 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9913 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9914 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9916 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9918 if (tanfn != NULL_TREE)
9919 return build_function_call_expr (tanfn,
9920 TREE_OPERAND (arg0, 1));
9923 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9924 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9925 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9926 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9927 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9928 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9930 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9932 if (tanfn != NULL_TREE)
9934 tree tmp = TREE_OPERAND (arg0, 1);
9935 tmp = build_function_call_expr (tanfn, tmp);
9936 return fold_build2 (RDIV_EXPR, type,
9937 build_real (type, dconst1), tmp);
9941 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9942 NaNs or Infinities. */
9943 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9944 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9945 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9947 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9948 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9950 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9951 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9952 && operand_equal_p (arg00, arg01, 0))
9954 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9956 if (cosfn != NULL_TREE)
9957 return build_function_call_expr (cosfn,
9958 TREE_OPERAND (arg0, 1));
9962 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9963 NaNs or Infinities. */
9964 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9965 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9966 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9968 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9969 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9971 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9972 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9973 && operand_equal_p (arg00, arg01, 0))
9975 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9977 if (cosfn != NULL_TREE)
9979 tree tmp = TREE_OPERAND (arg0, 1);
9980 tmp = build_function_call_expr (cosfn, tmp);
9981 return fold_build2 (RDIV_EXPR, type,
9982 build_real (type, dconst1),
9988 /* Optimize pow(x,c)/x as pow(x,c-1). */
9989 if (fcode0 == BUILT_IN_POW
9990 || fcode0 == BUILT_IN_POWF
9991 || fcode0 == BUILT_IN_POWL)
9993 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9994 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9995 if (TREE_CODE (arg01) == REAL_CST
9996 && ! TREE_CONSTANT_OVERFLOW (arg01)
9997 && operand_equal_p (arg1, arg00, 0))
9999 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10003 c = TREE_REAL_CST (arg01);
10004 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10005 arg = build_real (type, c);
10006 arglist = build_tree_list (NULL_TREE, arg);
10007 arglist = tree_cons (NULL_TREE, arg1, arglist);
10008 return build_function_call_expr (powfn, arglist);
10012 /* Optimize x/expN(y) into x*expN(-y). */
10013 if (BUILTIN_EXPONENT_P (fcode1))
10015 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10016 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10017 tree arglist = build_tree_list (NULL_TREE,
10018 fold_convert (type, arg));
10019 arg1 = build_function_call_expr (expfn, arglist);
10020 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10023 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10024 if (fcode1 == BUILT_IN_POW
10025 || fcode1 == BUILT_IN_POWF
10026 || fcode1 == BUILT_IN_POWL)
10028 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10029 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10030 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10031 tree neg11 = fold_convert (type, negate_expr (arg11));
10032 tree arglist = tree_cons (NULL_TREE, arg10,
10033 build_tree_list (NULL_TREE, neg11));
10034 arg1 = build_function_call_expr (powfn, arglist);
10035 return fold_build2 (MULT_EXPR, type, arg0, arg1);
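/* Illustrative sketch of the two reciprocal rewrites above (not from
   the original source):

       x / exp (y)      =>   x * exp (-y)
       x / pow (y, z)   =>   x * pow (y, -z)

   The division becomes a multiplication and the callee absorbs the
   negated argument.  */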
10040 case TRUNC_DIV_EXPR:
10041 case FLOOR_DIV_EXPR:
10042 /* Simplify A / (B << N) where A and B are positive and B is
10043 a power of 2, to A >> (N + log2(B)). */
10044 strict_overflow_p = false;
10045 if (TREE_CODE (arg1) == LSHIFT_EXPR
10046 && (TYPE_UNSIGNED (type)
10047 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10049 tree sval = TREE_OPERAND (arg1, 0);
10050 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10052 tree sh_cnt = TREE_OPERAND (arg1, 1);
10053 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10055 if (strict_overflow_p)
10056 fold_overflow_warning (("assuming signed overflow does not "
10057 "occur when simplifying A / (B << N)"),
10058 WARN_STRICT_OVERFLOW_MISC);
10060 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10061 sh_cnt, build_int_cst (NULL_TREE, pow2));
10062 return fold_build2 (RSHIFT_EXPR, type,
10063 fold_convert (type, arg0), sh_cnt);
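/* Worked example of the shift-divide rewrite above (illustrative only),
   assuming 32-bit unsigned int:

       a / (8u << n)   =>   a >> (n + 3)

   since 8 is a power of 2 with log2(8) == 3, and A is known
   non-negative because the type is unsigned.  */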
10068 case ROUND_DIV_EXPR:
10069 case CEIL_DIV_EXPR:
10070 case EXACT_DIV_EXPR:
10071 if (integer_onep (arg1))
10072 return non_lvalue (fold_convert (type, arg0));
10073 if (integer_zerop (arg1))
10075 /* X / -1 is -X. */
10076 if (!TYPE_UNSIGNED (type)
10077 && TREE_CODE (arg1) == INTEGER_CST
10078 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10079 && TREE_INT_CST_HIGH (arg1) == -1)
10080 return fold_convert (type, negate_expr (arg0));
10082 /* Convert -A / -B to A / B when the type is signed and overflow is undefined.  */
10084 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10085 && TREE_CODE (arg0) == NEGATE_EXPR
10086 && negate_expr_p (arg1))
10088 if (INTEGRAL_TYPE_P (type))
10089 fold_overflow_warning (("assuming signed overflow does not occur "
10090 "when distributing negation across "
10092 WARN_STRICT_OVERFLOW_MISC);
10093 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10094 negate_expr (arg1));
10096 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10097 && TREE_CODE (arg1) == NEGATE_EXPR
10098 && negate_expr_p (arg0))
10100 if (INTEGRAL_TYPE_P (type))
10101 fold_overflow_warning (("assuming signed overflow does not occur "
10102 "when distributing negation across "
10104 WARN_STRICT_OVERFLOW_MISC);
10105 return fold_build2 (code, type, negate_expr (arg0),
10106 TREE_OPERAND (arg1, 0));
10109 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10110 operation, EXACT_DIV_EXPR.
10112 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10113 At one time others generated faster code; it's not clear if they do
10114 after the last round of changes to the DIV code in expmed.c.  */
10115 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10116 && multiple_of_p (type, arg0, arg1))
10117 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10119 strict_overflow_p = false;
10120 if (TREE_CODE (arg1) == INTEGER_CST
10121 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10122 &strict_overflow_p)))
10124 if (strict_overflow_p)
10125 fold_overflow_warning (("assuming signed overflow does not occur "
10126 "when simplifying division"),
10127 WARN_STRICT_OVERFLOW_MISC);
10128 return fold_convert (type, tem);
10133 case CEIL_MOD_EXPR:
10134 case FLOOR_MOD_EXPR:
10135 case ROUND_MOD_EXPR:
10136 case TRUNC_MOD_EXPR:
10137 /* X % 1 is always zero, but be sure to preserve any side effects in X.  */
10139 if (integer_onep (arg1))
10140 return omit_one_operand (type, integer_zero_node, arg0);
10142 /* X % 0, return X % 0 unchanged so that we can get the
10143 proper warnings and errors. */
10144 if (integer_zerop (arg1))
10147 /* 0 % X is always zero, but be sure to preserve any side
10148 effects in X. Place this after checking for X == 0. */
10149 if (integer_zerop (arg0))
10150 return omit_one_operand (type, integer_zero_node, arg1);
10152 /* X % -1 is zero. */
10153 if (!TYPE_UNSIGNED (type)
10154 && TREE_CODE (arg1) == INTEGER_CST
10155 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10156 && TREE_INT_CST_HIGH (arg1) == -1)
10157 return omit_one_operand (type, integer_zero_node, arg0);
10159 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10160 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10161 strict_overflow_p = false;
10162 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10163 && (TYPE_UNSIGNED (type)
10164 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10167 /* Also optimize A % (C << N) where C is a power of 2,
10168 to A & ((C << N) - 1). */
10169 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10170 c = TREE_OPERAND (arg1, 0);
10172 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10174 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10175 arg1, integer_one_node);
10176 if (strict_overflow_p)
10177 fold_overflow_warning (("assuming signed overflow does not "
10178 "occur when simplifying "
10179 "X % (power of two)"),
10180 WARN_STRICT_OVERFLOW_MISC);
10181 return fold_build2 (BIT_AND_EXPR, type,
10182 fold_convert (type, arg0),
10183 fold_convert (type, mask));
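/* Worked examples of the mask rewrite above (illustrative only), for
   unsigned x:

       x % 16          =>   x & 15
       x % (4u << n)   =>   x & ((4u << n) - 1)

   The second line is the LSHIFT_EXPR case, where only the shifted
   constant C needs to be a power of 2.  */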
10187 /* X % -C is the same as X % C. */
10188 if (code == TRUNC_MOD_EXPR
10189 && !TYPE_UNSIGNED (type)
10190 && TREE_CODE (arg1) == INTEGER_CST
10191 && !TREE_CONSTANT_OVERFLOW (arg1)
10192 && TREE_INT_CST_HIGH (arg1) < 0
10193 && !TYPE_OVERFLOW_TRAPS (type)
10194 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10195 && !sign_bit_p (arg1, arg1))
10196 return fold_build2 (code, type, fold_convert (type, arg0),
10197 fold_convert (type, negate_expr (arg1)));
10199 /* X % -Y is the same as X % Y. */
10200 if (code == TRUNC_MOD_EXPR
10201 && !TYPE_UNSIGNED (type)
10202 && TREE_CODE (arg1) == NEGATE_EXPR
10203 && !TYPE_OVERFLOW_TRAPS (type))
10204 return fold_build2 (code, type, fold_convert (type, arg0),
10205 fold_convert (type, TREE_OPERAND (arg1, 0)));
10207 if (TREE_CODE (arg1) == INTEGER_CST
10208 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10209 &strict_overflow_p)))
10211 if (strict_overflow_p)
10212 fold_overflow_warning (("assuming signed overflow does not occur "
10213 "when simplifying modulos"),
10214 WARN_STRICT_OVERFLOW_MISC);
10215 return fold_convert (type, tem);
10222 if (integer_all_onesp (arg0))
10223 return omit_one_operand (type, arg0, arg1);
10227 /* Optimize -1 >> x for arithmetic right shifts. */
10228 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10229 return omit_one_operand (type, arg0, arg1);
10230 /* ... fall through ... */
10234 if (integer_zerop (arg1))
10235 return non_lvalue (fold_convert (type, arg0));
10236 if (integer_zerop (arg0))
10237 return omit_one_operand (type, arg0, arg1);
10239 /* Since a negative shift count is not well-defined,
10240 don't try to compute it in the compiler. */
10241 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10244 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10245 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10246 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10247 && host_integerp (TREE_OPERAND (arg0, 1), false)
10248 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10250 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10251 + TREE_INT_CST_LOW (arg1));
10253 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10254 being well defined. */
10255 if (low >= TYPE_PRECISION (type))
10257 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10258 low = low % TYPE_PRECISION (type);
10259 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10260 return build_int_cst (type, 0);
10262 low = TYPE_PRECISION (type) - 1;
10265 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10266 build_int_cst (type, low));
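/* Illustrative examples of the combined-count rewrite above, assuming a
   32-bit type (not from the original source):

       (x >> 3) >> 4    =>   x >> 7
       (x >> 20) >> 20  =>   0 for unsigned x, x >> 31 for signed x

   The second line shows the clamping applied when c1 + c2 reaches the
   precision of the type.  */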
10269 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10270 into x & ((unsigned)-1 >> c) for unsigned types. */
10271 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10272 || (TYPE_UNSIGNED (type)
10273 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10274 && host_integerp (arg1, false)
10275 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10276 && host_integerp (TREE_OPERAND (arg0, 1), false)
10277 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10279 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10280 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10286 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10288 lshift = build_int_cst (type, -1);
10289 lshift = int_const_binop (code, lshift, arg1, 0);
10291 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10295 /* Rewrite an LROTATE_EXPR by a constant into an
10296 RROTATE_EXPR by a new constant. */
10297 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10299 tree tem = build_int_cst (NULL_TREE,
10300 GET_MODE_BITSIZE (TYPE_MODE (type)));
10301 tem = fold_convert (TREE_TYPE (arg1), tem);
10302 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10303 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10306 /* If we have a rotate of a bit operation with the rotate count and
10307 the second operand of the bit operation both constant,
10308 permute the two operations. */
10309 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10310 && (TREE_CODE (arg0) == BIT_AND_EXPR
10311 || TREE_CODE (arg0) == BIT_IOR_EXPR
10312 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10314 return fold_build2 (TREE_CODE (arg0), type,
10315 fold_build2 (code, type,
10316 TREE_OPERAND (arg0, 0), arg1),
10317 fold_build2 (code, type,
10318 TREE_OPERAND (arg0, 1), arg1));
10320 /* Two consecutive rotates adding up to the width of the mode can be ignored.  */
10322 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10323 && TREE_CODE (arg0) == RROTATE_EXPR
10324 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10325 && TREE_INT_CST_HIGH (arg1) == 0
10326 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10327 && ((TREE_INT_CST_LOW (arg1)
10328 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10329 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10330 return TREE_OPERAND (arg0, 0);
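/* Rotate examples for the two preceding transformations, assuming a
   32-bit type (illustrative only; rotates typically reach the tree in
   the form (x << 8) | (x >> 24)):

       lrotate (x, 8)                 =>   rrotate (x, 24)
       rrotate (rrotate (x, 8), 24)   =>   x

   Two right-rotates whose counts sum to the mode width are an
   identity.  */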
10335 if (operand_equal_p (arg0, arg1, 0))
10336 return omit_one_operand (type, arg0, arg1);
10337 if (INTEGRAL_TYPE_P (type)
10338 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10339 return omit_one_operand (type, arg1, arg0);
10340 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10346 if (operand_equal_p (arg0, arg1, 0))
10347 return omit_one_operand (type, arg0, arg1);
10348 if (INTEGRAL_TYPE_P (type)
10349 && TYPE_MAX_VALUE (type)
10350 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10351 return omit_one_operand (type, arg1, arg0);
10352 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10357 case TRUTH_ANDIF_EXPR:
10358 /* Note that the operands of this must be ints
10359 and their values must be 0 or 1.
10360 ("true" is a fixed value perhaps depending on the language.) */
10361 /* If first arg is constant zero, return it. */
10362 if (integer_zerop (arg0))
10363 return fold_convert (type, arg0);
10364 case TRUTH_AND_EXPR:
10365 /* If either arg is constant true, drop it. */
10366 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10367 return non_lvalue (fold_convert (type, arg1));
10368 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10369 /* Preserve sequence points. */
10370 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10371 return non_lvalue (fold_convert (type, arg0));
10372 /* If second arg is constant zero, result is zero, but first arg
10373 must be evaluated. */
10374 if (integer_zerop (arg1))
10375 return omit_one_operand (type, arg1, arg0);
10376 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10377 case will be handled here. */
10378 if (integer_zerop (arg0))
10379 return omit_one_operand (type, arg0, arg1);
10381 /* !X && X is always false. */
10382 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10383 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10384 return omit_one_operand (type, integer_zero_node, arg1);
10385 /* X && !X is always false. */
10386 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10387 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10388 return omit_one_operand (type, integer_zero_node, arg0);
10390 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10391 means A >= Y && A != MAX, but in this case we know that A < X <= MAX.  */
10394 if (!TREE_SIDE_EFFECTS (arg0)
10395 && !TREE_SIDE_EFFECTS (arg1))
10397 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10398 if (tem && !operand_equal_p (tem, arg0, 0))
10399 return fold_build2 (code, type, tem, arg1);
10401 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10402 if (tem && !operand_equal_p (tem, arg1, 0))
10403 return fold_build2 (code, type, arg0, tem);
10407 /* We only do these simplifications if we are optimizing. */
10411 /* Check for things like (A || B) && (A || C). We can convert this
10412 to A || (B && C). Note that either operator can be any of the four
10413 truth and/or operations and the transformation will still be
10414 valid. Also note that we only care about order for the
10415 ANDIF and ORIF operators. If B contains side effects, this
10416 might change the truth-value of A. */
10417 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10418 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10419 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10420 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10421 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10422 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10424 tree a00 = TREE_OPERAND (arg0, 0);
10425 tree a01 = TREE_OPERAND (arg0, 1);
10426 tree a10 = TREE_OPERAND (arg1, 0);
10427 tree a11 = TREE_OPERAND (arg1, 1);
10428 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10429 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10430 && (code == TRUTH_AND_EXPR
10431 || code == TRUTH_OR_EXPR));
10433 if (operand_equal_p (a00, a10, 0))
10434 return fold_build2 (TREE_CODE (arg0), type, a00,
10435 fold_build2 (code, type, a01, a11));
10436 else if (commutative && operand_equal_p (a00, a11, 0))
10437 return fold_build2 (TREE_CODE (arg0), type, a00,
10438 fold_build2 (code, type, a01, a10));
10439 else if (commutative && operand_equal_p (a01, a10, 0))
10440 return fold_build2 (TREE_CODE (arg0), type, a01,
10441 fold_build2 (code, type, a00, a11));
10443 /* This case is tricky because we must either have commutative
10444 operators or else A10 must not have side-effects. */
10446 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10447 && operand_equal_p (a01, a11, 0))
10448 return fold_build2 (TREE_CODE (arg0), type,
10449 fold_build2 (code, type, a00, a10),
10453 /* See if we can build a range comparison. */
10454 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10457 /* Check for the possibility of merging component references. If our
10458 lhs is another similar operation, try to merge its rhs with our
10459 rhs. Then try to merge our lhs and rhs. */
10460 if (TREE_CODE (arg0) == code
10461 && 0 != (tem = fold_truthop (code, type,
10462 TREE_OPERAND (arg0, 1), arg1)))
10463 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10465 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10470 case TRUTH_ORIF_EXPR:
10471 /* Note that the operands of this must be ints
10472 and their values must be 0 or true.
10473 ("true" is a fixed value perhaps depending on the language.) */
10474 /* If first arg is constant true, return it. */
10475 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10476 return fold_convert (type, arg0);
10477 case TRUTH_OR_EXPR:
10478 /* If either arg is constant zero, drop it. */
10479 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10480 return non_lvalue (fold_convert (type, arg1));
10481 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10482 /* Preserve sequence points. */
10483 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10484 return non_lvalue (fold_convert (type, arg0));
10485 /* If second arg is constant true, result is true, but we must
10486 evaluate first arg. */
10487 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10488 return omit_one_operand (type, arg1, arg0);
10489 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR.  */
10491 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10492 return omit_one_operand (type, arg0, arg1);
10494 /* !X || X is always true. */
10495 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10496 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10497 return omit_one_operand (type, integer_one_node, arg1);
10498 /* X || !X is always true. */
10499 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10500 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10501 return omit_one_operand (type, integer_one_node, arg0);
10505 case TRUTH_XOR_EXPR:
10506 /* If the second arg is constant zero, drop it. */
10507 if (integer_zerop (arg1))
10508 return non_lvalue (fold_convert (type, arg0));
10509 /* If the second arg is constant true, this is a logical inversion. */
10510 if (integer_onep (arg1))
10512 /* Only call invert_truthvalue if operand is a truth value. */
10513 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10514 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10516 tem = invert_truthvalue (arg0);
10517 return non_lvalue (fold_convert (type, tem));
10519 /* Identical arguments cancel to zero. */
10520 if (operand_equal_p (arg0, arg1, 0))
10521 return omit_one_operand (type, integer_zero_node, arg0);
10523 /* !X ^ X is always true. */
10524 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10525 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10526 return omit_one_operand (type, integer_one_node, arg1);
10528 /* X ^ !X is always true. */
10529 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10530 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10531 return omit_one_operand (type, integer_one_node, arg0);
10537 tem = fold_comparison (code, type, op0, op1);
10538 if (tem != NULL_TREE)
10541 /* bool_var != 0 becomes bool_var. */
10542 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10543 && code == NE_EXPR)
10544 return non_lvalue (fold_convert (type, arg0));
10546 /* bool_var == 1 becomes bool_var. */
10547 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10548 && code == EQ_EXPR)
10549 return non_lvalue (fold_convert (type, arg0));
10551 /* bool_var != 1 becomes !bool_var. */
10552 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10553 && code == NE_EXPR)
10554 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10556 /* bool_var == 0 becomes !bool_var. */
10557 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10558 && code == EQ_EXPR)
10559 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10561 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10562 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10563 && TREE_CODE (arg1) == INTEGER_CST)
10565 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10566 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10567 fold_build1 (BIT_NOT_EXPR, cmp_type,
10568 fold_convert (cmp_type, arg1)));
10571 /* If this is an equality comparison of the address of a non-weak
10572 object against zero, then we know the result. */
10573 if (TREE_CODE (arg0) == ADDR_EXPR
10574 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10575 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10576 && integer_zerop (arg1))
10577 return constant_boolean_node (code != EQ_EXPR, type);
10579 /* If this is an equality comparison of the address of two non-weak,
10580 unaliased symbols neither of which are extern (since we do not
10581 have access to attributes for externs), then we know the result. */
10582 if (TREE_CODE (arg0) == ADDR_EXPR
10583 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10584 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10585 && ! lookup_attribute ("alias",
10586 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10587 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10588 && TREE_CODE (arg1) == ADDR_EXPR
10589 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10590 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10591 && ! lookup_attribute ("alias",
10592 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10593 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10595 /* We know that we're looking at the address of two
10596 non-weak, unaliased, static _DECL nodes.
10598 It is both wasteful and incorrect to call operand_equal_p
10599 to compare the two ADDR_EXPR nodes. It is wasteful in that
10600 all we need to do is test pointer equality for the arguments
10601 to the two ADDR_EXPR nodes. It is incorrect to use
10602 operand_equal_p as that function is NOT equivalent to a
10603 C equality test. It can in fact return false for two
10604 objects which would test as equal using the C equality operator.  */
10606 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10607 return constant_boolean_node (equal
10608 ? code == EQ_EXPR : code != EQ_EXPR,
10612 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10613 a MINUS_EXPR of a constant, we can convert it into a comparison with
10614 a revised constant as long as no overflow occurs. */
10615 if (TREE_CODE (arg1) == INTEGER_CST
10616 && (TREE_CODE (arg0) == PLUS_EXPR
10617 || TREE_CODE (arg0) == MINUS_EXPR)
10618 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10619 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10620 ? MINUS_EXPR : PLUS_EXPR,
10621 fold_convert (TREE_TYPE (arg0), arg1),
10622 TREE_OPERAND (arg0, 1), 0))
10623 && ! TREE_CONSTANT_OVERFLOW (tem))
10624 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10626 /* Similarly for a NEGATE_EXPR. */
10627 if (TREE_CODE (arg0) == NEGATE_EXPR
10628 && TREE_CODE (arg1) == INTEGER_CST
10629 && 0 != (tem = negate_expr (arg1))
10630 && TREE_CODE (tem) == INTEGER_CST
10631 && ! TREE_CONSTANT_OVERFLOW (tem))
10632 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10634 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10635 for !=. Don't do this for ordered comparisons due to overflow. */
10636 if (TREE_CODE (arg0) == MINUS_EXPR
10637 && integer_zerop (arg1))
10638 return fold_build2 (code, type,
10639 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10641 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10642 if (TREE_CODE (arg0) == ABS_EXPR
10643 && (integer_zerop (arg1) || real_zerop (arg1)))
10644 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10646 /* If this is an EQ or NE comparison with zero and ARG0 is
10647 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10648 two operations, but the latter can be done in one less insn
10649 on machines that have only two-operand insns or on which a
10650 constant cannot be the first operand. */
10651 if (TREE_CODE (arg0) == BIT_AND_EXPR
10652 && integer_zerop (arg1))
10654 tree arg00 = TREE_OPERAND (arg0, 0);
10655 tree arg01 = TREE_OPERAND (arg0, 1);
10656 if (TREE_CODE (arg00) == LSHIFT_EXPR
10657 && integer_onep (TREE_OPERAND (arg00, 0)))
10659 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10660 arg01, TREE_OPERAND (arg00, 1));
10661 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10662 build_int_cst (TREE_TYPE (arg0), 1));
10663 return fold_build2 (code, type,
10664 fold_convert (TREE_TYPE (arg1), tem), arg1);
10666 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10667 && integer_onep (TREE_OPERAND (arg01, 0)))
10669 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10670 arg00, TREE_OPERAND (arg01, 1));
10671 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10672 build_int_cst (TREE_TYPE (arg0), 1));
10673 return fold_build2 (code, type,
10674 fold_convert (TREE_TYPE (arg1), tem), arg1);
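/* Illustrative example of the single-bit test rewrite above (not from
   the original source):

       ((1 << foo) & bar) == 0   =>   ((bar >> foo) & 1) == 0

   Both forms take two operations, but the second avoids a constant in
   the first operand of the shift, which some machines cannot
   encode.  */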
10678 /* If this is an NE or EQ comparison of zero against the result of a
10679 signed MOD operation whose second operand is a power of 2, make
10680 the MOD operation unsigned since it is simpler and equivalent. */
10681 if (integer_zerop (arg1)
10682 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10683 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10684 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10685 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10686 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10687 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10689 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10690 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10691 fold_convert (newtype,
10692 TREE_OPERAND (arg0, 0)),
10693 fold_convert (newtype,
10694 TREE_OPERAND (arg0, 1)));
10696 return fold_build2 (code, type, newmod,
10697 fold_convert (newtype, arg1));
10700 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10701 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
10703 if (TREE_CODE (arg0) == BIT_AND_EXPR
10704 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10705 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10707 && integer_pow2p (TREE_OPERAND (arg0, 1))
10708 && integer_zerop (arg1))
10710 tree itype = TREE_TYPE (arg0);
10711 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10712 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10714 /* Check for a valid shift count. */
10715 if (TREE_INT_CST_HIGH (arg001) == 0
10716 && TREE_INT_CST_LOW (arg001) < prec)
10718 tree arg01 = TREE_OPERAND (arg0, 1);
10719 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10720 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10721 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10722 can be rewritten as (X & (C2 << C1)) != 0. */
10723 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10725 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10726 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10727 return fold_build2 (code, type, tem, arg1);
10729 /* Otherwise, for signed (arithmetic) shifts,
10730 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10731 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10732 else if (!TYPE_UNSIGNED (itype))
10733 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10734 arg000, build_int_cst (itype, 0));
10735 /* Otherwise, for unsigned (logical) shifts,
10736 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10737 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10739 return omit_one_operand (type,
10740 code == EQ_EXPR ? integer_one_node
10741 : integer_zero_node,
10746 /* If this is an NE comparison of zero with an AND of one, remove the
10747 comparison since the AND will give the correct value. */
10748 if (code == NE_EXPR
10749 && integer_zerop (arg1)
10750 && TREE_CODE (arg0) == BIT_AND_EXPR
10751 && integer_onep (TREE_OPERAND (arg0, 1)))
10752 return fold_convert (type, arg0);
10754 /* If we have (A & C) == C where C is a power of 2, convert this into
10755 (A & C) != 0. Similarly for NE_EXPR. */
10756 if (TREE_CODE (arg0) == BIT_AND_EXPR
10757 && integer_pow2p (TREE_OPERAND (arg0, 1))
10758 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10759 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10760 arg0, fold_convert (TREE_TYPE (arg0),
10761 integer_zero_node));
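/* Illustrative example of the power-of-2 mask comparison above (not
   from the original source):

       (a & 8) == 8   =>   (a & 8) != 0
       (a & 8) != 8   =>   (a & 8) == 0

   Valid only because C is a single bit, so A & C is either 0 or C.  */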
10763 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10764 bit, then fold the expression into A < 0 or A >= 0. */
10765 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10769 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10770 Similarly for NE_EXPR. */
10771 if (TREE_CODE (arg0) == BIT_AND_EXPR
10772 && TREE_CODE (arg1) == INTEGER_CST
10773 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10775 tree notc = fold_build1 (BIT_NOT_EXPR,
10776 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10777 TREE_OPERAND (arg0, 1));
10778 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10780 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10781 if (integer_nonzerop (dandnotc))
10782 return omit_one_operand (type, rslt, arg0);
10785 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10786 Similarly for NE_EXPR. */
10787 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10788 && TREE_CODE (arg1) == INTEGER_CST
10789 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10791 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10792 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10793 TREE_OPERAND (arg0, 1), notd);
10794 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10795 if (integer_nonzerop (candnotd))
10796 return omit_one_operand (type, rslt, arg0);
10799 /* If this is a comparison of a field, we may be able to simplify it. */
10800 if (((TREE_CODE (arg0) == COMPONENT_REF
10801 && lang_hooks.can_use_bit_fields_p ())
10802 || TREE_CODE (arg0) == BIT_FIELD_REF)
10803 /* Handle the constant case even without -O
10804 to make sure the warnings are given. */
10805 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10807 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10812 /* Optimize comparisons of strlen vs zero to a compare of the
10813 first character of the string vs zero. To wit,
10814 strlen(ptr) == 0 => *ptr == 0
10815 strlen(ptr) != 0 => *ptr != 0
10816 Other cases should reduce to one of these two (or a constant)
10817 due to the return value of strlen being unsigned. */
10818 if (TREE_CODE (arg0) == CALL_EXPR
10819 && integer_zerop (arg1))
10821 tree fndecl = get_callee_fndecl (arg0);
10822 tree arglist;
10824 if (fndecl
10825 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10826 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10827 && (arglist = TREE_OPERAND (arg0, 1))
10828 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10829 && ! TREE_CHAIN (arglist))
10831 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10832 return fold_build2 (code, type, iref,
10833 build_int_cst (TREE_TYPE (iref), 0));
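/* Illustrative example of the strlen shortcut above (not from the
   original source):

       strlen (p) == 0   =>   *p == 0

   A single load of the first character replaces the call.  */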
10837 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10838 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10839 if (TREE_CODE (arg0) == RSHIFT_EXPR
10840 && integer_zerop (arg1)
10841 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10843 tree arg00 = TREE_OPERAND (arg0, 0);
10844 tree arg01 = TREE_OPERAND (arg0, 1);
10845 tree itype = TREE_TYPE (arg00);
10846 if (TREE_INT_CST_HIGH (arg01) == 0
10847 && TREE_INT_CST_LOW (arg01)
10848 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10850 if (TYPE_UNSIGNED (itype))
10852 itype = lang_hooks.types.signed_type (itype);
10853 arg00 = fold_convert (itype, arg00);
10855 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10856 type, arg00, build_int_cst (itype, 0));
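/* Illustrative example of the sign-test rewrite above for a 32-bit int
   (not from the original source):

       (x >> 31) != 0   =>   x < 0
       (x >> 31) == 0   =>   x >= 0

   For unsigned X the operand is first converted to the corresponding
   signed type, as done just above.  */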
10860 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10861 if (integer_zerop (arg1)
10862 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10863 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10864 TREE_OPERAND (arg0, 1));
10866 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10867 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10868 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10869 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10870 build_int_cst (TREE_TYPE (arg1), 0));
10871 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10872 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10873 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10874 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10875 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10876 build_int_cst (TREE_TYPE (arg1), 0));
10878 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10879 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10880 && TREE_CODE (arg1) == INTEGER_CST
10881 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10882 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10883 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10884 TREE_OPERAND (arg0, 1), arg1));
10886 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10887 (X & C) == 0 when C is a single bit. */
10888 if (TREE_CODE (arg0) == BIT_AND_EXPR
10889 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10890 && integer_zerop (arg1)
10891 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10893 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10894 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10895 TREE_OPERAND (arg0, 1));
10896 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10900 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10901 constant C is a power of two, i.e. a single bit. */
10902 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10903 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10904 && integer_zerop (arg1)
10905 && integer_pow2p (TREE_OPERAND (arg0, 1))
10906 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10907 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10909 tree arg00 = TREE_OPERAND (arg0, 0);
10910 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10911 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10914 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10915 when C is a power of two, i.e. a single bit.  */
10916 if (TREE_CODE (arg0) == BIT_AND_EXPR
10917 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10918 && integer_zerop (arg1)
10919 && integer_pow2p (TREE_OPERAND (arg0, 1))
10920 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10921 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10923 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10924 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10925 arg000, TREE_OPERAND (arg0, 1));
10926 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10927 tem, build_int_cst (TREE_TYPE (tem), 0));
10930 if (integer_zerop (arg1)
10931 && tree_expr_nonzero_p (arg0))
10933 tree res = constant_boolean_node (code == NE_EXPR, type);
10934 return omit_one_operand (type, res, arg0);
10942 tem = fold_comparison (code, type, op0, op1);
10943 if (tem != NULL_TREE)
10946 /* Transform comparisons of the form X +- C CMP X. */
10947 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10948 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10949 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10950 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10951 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10952 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10954 tree arg01 = TREE_OPERAND (arg0, 1);
10955 enum tree_code code0 = TREE_CODE (arg0);
10958 if (TREE_CODE (arg01) == REAL_CST)
10959 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10961 is_positive = tree_int_cst_sgn (arg01);
10963 /* (X - c) > X becomes false. */
10964 if (code == GT_EXPR
10965 && ((code0 == MINUS_EXPR && is_positive >= 0)
10966 || (code0 == PLUS_EXPR && is_positive <= 0)))
10968 if (TREE_CODE (arg01) == INTEGER_CST
10969 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10970 fold_overflow_warning (("assuming signed overflow does not "
10971 "occur when assuming that (X - c) > X "
10972 "is always false"),
10973 WARN_STRICT_OVERFLOW_ALL);
10974 return constant_boolean_node (0, type);
10977 /* Likewise (X + c) < X becomes false. */
10978 if (code == LT_EXPR
10979 && ((code0 == PLUS_EXPR && is_positive >= 0)
10980 || (code0 == MINUS_EXPR && is_positive <= 0)))
10982 if (TREE_CODE (arg01) == INTEGER_CST
10983 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10984 fold_overflow_warning (("assuming signed overflow does not "
10985 "occur when assuming that "
10986 "(X + c) < X is always false"),
10987 WARN_STRICT_OVERFLOW_ALL);
10988 return constant_boolean_node (0, type);
10991 /* Convert (X - c) <= X to true. */
10992 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10994 && ((code0 == MINUS_EXPR && is_positive >= 0)
10995 || (code0 == PLUS_EXPR && is_positive <= 0)))
10997 if (TREE_CODE (arg01) == INTEGER_CST
10998 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10999 fold_overflow_warning (("assuming signed overflow does not "
11000 "occur when assuming that "
11001 "(X - c) <= X is always true"),
11002 WARN_STRICT_OVERFLOW_ALL);
11003 return constant_boolean_node (1, type);
11006 /* Convert (X + c) >= X to true. */
11007 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11009 && ((code0 == PLUS_EXPR && is_positive >= 0)
11010 || (code0 == MINUS_EXPR && is_positive <= 0)))
11012 if (TREE_CODE (arg01) == INTEGER_CST
11013 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11014 fold_overflow_warning (("assuming signed overflow does not "
11015 "occur when assuming that "
11016 "(X + c) >= X is always true"),
11017 WARN_STRICT_OVERFLOW_ALL);
11018 return constant_boolean_node (1, type);
11021 if (TREE_CODE (arg01) == INTEGER_CST)
11023 /* Convert X + c > X and X - c < X to true for integers. */
11024 if (code == GT_EXPR
11025 && ((code0 == PLUS_EXPR && is_positive > 0)
11026 || (code0 == MINUS_EXPR && is_positive < 0)))
11028 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11029 fold_overflow_warning (("assuming signed overflow does "
11030 "not occur when assuming that "
11031 "(X + c) > X is always true"),
11032 WARN_STRICT_OVERFLOW_ALL);
11033 return constant_boolean_node (1, type);
11036 if (code == LT_EXPR
11037 && ((code0 == MINUS_EXPR && is_positive > 0)
11038 || (code0 == PLUS_EXPR && is_positive < 0)))
11040 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11041 fold_overflow_warning (("assuming signed overflow does "
11042 "not occur when assuming that "
11043 "(X - c) < X is always true"),
11044 WARN_STRICT_OVERFLOW_ALL);
11045 return constant_boolean_node (1, type);
11048 /* Convert X + c <= X and X - c >= X to false for integers. */
11049 if (code == LE_EXPR
11050 && ((code0 == PLUS_EXPR && is_positive > 0)
11051 || (code0 == MINUS_EXPR && is_positive < 0)))
11053 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11054 fold_overflow_warning (("assuming signed overflow does "
11055 "not occur when assuming that "
11056 "(X + c) <= X is always false"),
11057 WARN_STRICT_OVERFLOW_ALL);
11058 return constant_boolean_node (0, type);
11061 if (code == GE_EXPR
11062 && ((code0 == MINUS_EXPR && is_positive > 0)
11063 || (code0 == PLUS_EXPR && is_positive < 0)))
11065 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11066 fold_overflow_warning (("assuming signed overflow does "
11067 "not occur when assuming that "
11068 "(X - c) >= X is always true"),
11069 WARN_STRICT_OVERFLOW_ALL);
11070 return constant_boolean_node (0, type);
11075 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11076 This transformation affects the cases which are handled in later
11077 optimizations involving comparisons with non-negative constants. */
11078 if (TREE_CODE (arg1) == INTEGER_CST
11079 && TREE_CODE (arg0) != INTEGER_CST
11080 && tree_int_cst_sgn (arg1) > 0)
11082 if (code == GE_EXPR)
11084 arg1 = const_binop (MINUS_EXPR, arg1,
11085 build_int_cst (TREE_TYPE (arg1), 1), 0);
11086 return fold_build2 (GT_EXPR, type, arg0,
11087 fold_convert (TREE_TYPE (arg0), arg1));
11089 if (code == LT_EXPR)
11091 arg1 = const_binop (MINUS_EXPR, arg1,
11092 build_int_cst (TREE_TYPE (arg1), 1), 0);
11093 return fold_build2 (LE_EXPR, type, arg0,
11094 fold_convert (TREE_TYPE (arg0), arg1));
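/* Illustrative example of the canonicalization above (not from the
   original source):

       x >= 5   =>   x > 4
       x < 5    =>   x <= 4

   Only applied for constant C > 0, so the decrement cannot wrap.  */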
11098 /* Comparisons with the highest or lowest possible integer of
11099 the specified size will have known values. */
11101 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11103 if (TREE_CODE (arg1) == INTEGER_CST
11104 && ! TREE_CONSTANT_OVERFLOW (arg1)
11105 && width <= 2 * HOST_BITS_PER_WIDE_INT
11106 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11107 || POINTER_TYPE_P (TREE_TYPE (arg1))))
11109 HOST_WIDE_INT signed_max_hi;
11110 unsigned HOST_WIDE_INT signed_max_lo;
11111 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11113 if (width <= HOST_BITS_PER_WIDE_INT)
11115 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11120 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11122 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11128 max_lo = signed_max_lo;
11129 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11135 width -= HOST_BITS_PER_WIDE_INT;
11136 signed_max_lo = -1;
11137 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11142 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11144 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11149 max_hi = signed_max_hi;
11150 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11154 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11155 && TREE_INT_CST_LOW (arg1) == max_lo)
11159 return omit_one_operand (type, integer_zero_node, arg0);
11162 return fold_build2 (EQ_EXPR, type, op0, op1);
11165 return omit_one_operand (type, integer_one_node, arg0);
11168 return fold_build2 (NE_EXPR, type, op0, op1);
11170 /* The GE_EXPR and LT_EXPR cases above are not normally
11171 reached because of previous transformations. */
11176 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11178 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11182 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11183 return fold_build2 (EQ_EXPR, type,
11184 fold_convert (TREE_TYPE (arg1), arg0),
11187 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11188 return fold_build2 (NE_EXPR, type,
11189 fold_convert (TREE_TYPE (arg1), arg0),
11194 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11196 && TREE_INT_CST_LOW (arg1) == min_lo)
11200 return omit_one_operand (type, integer_zero_node, arg0);
11203 return fold_build2 (EQ_EXPR, type, op0, op1);
11206 return omit_one_operand (type, integer_one_node, arg0);
11209 return fold_build2 (NE_EXPR, type, op0, op1);
11214 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11216 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11220 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11221 return fold_build2 (NE_EXPR, type,
11222 fold_convert (TREE_TYPE (arg1), arg0),
11225 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11226 return fold_build2 (EQ_EXPR, type,
11227 fold_convert (TREE_TYPE (arg1), arg0),
11233 else if (!in_gimple_form
11234 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
11235 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11236 && TYPE_UNSIGNED (TREE_TYPE (arg1))
11237 /* signed_type does not work on pointer types. */
11238 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11240 /* The following case also applies to X < signed_max+1
11241 and X >= signed_max+1 because of previous transformations.  */
11242 if (code == LE_EXPR || code == GT_EXPR)
11245 st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11246 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11247 type, fold_convert (st, arg0),
11248 build_int_cst (st, 0));
11254 /* If we are comparing an ABS_EXPR with a constant, we can
11255 convert all the cases into explicit comparisons, but they may
11256 well not be faster than doing the ABS and one comparison.
11257 But ABS (X) <= C is a range comparison, which becomes a subtraction
11258 and a comparison, and is probably faster. */
11259 if (code == LE_EXPR
11260 && TREE_CODE (arg1) == INTEGER_CST
11261 && TREE_CODE (arg0) == ABS_EXPR
11262 && ! TREE_SIDE_EFFECTS (arg0)
11263 && (0 != (tem = negate_expr (arg1)))
11264 && TREE_CODE (tem) == INTEGER_CST
11265 && ! TREE_CONSTANT_OVERFLOW (tem))
11266 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11267 build2 (GE_EXPR, type,
11268 TREE_OPERAND (arg0, 0), tem),
11269 build2 (LE_EXPR, type,
11270 TREE_OPERAND (arg0, 0), arg1));
11272 /* Convert ABS_EXPR<x> >= 0 to true. */
11273 strict_overflow_p = false;
11274 if (code == GE_EXPR
11275 && (integer_zerop (arg1)
11276 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11277 && real_zerop (arg1)))
11278 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11280 if (strict_overflow_p)
11281 fold_overflow_warning (("assuming signed overflow does not occur "
11282 "when simplifying comparison of "
11283 "absolute value and zero"),
11284 WARN_STRICT_OVERFLOW_CONDITIONAL);
11285 return omit_one_operand (type, integer_one_node, arg0);
11288 /* Convert ABS_EXPR<x> < 0 to false. */
11289 strict_overflow_p = false;
11290 if (code == LT_EXPR
11291 && (integer_zerop (arg1) || real_zerop (arg1))
11292 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11294 if (strict_overflow_p)
11295 fold_overflow_warning (("assuming signed overflow does not occur "
11296 "when simplifying comparison of "
11297 "absolute value and zero"),
11298 WARN_STRICT_OVERFLOW_CONDITIONAL);
11299 return omit_one_operand (type, integer_zero_node, arg0);
11302 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11303 and similarly for >= into !=. */
11304 if ((code == LT_EXPR || code == GE_EXPR)
11305 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11306 && TREE_CODE (arg1) == LSHIFT_EXPR
11307 && integer_onep (TREE_OPERAND (arg1, 0)))
11308 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11309 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11310 TREE_OPERAND (arg1, 1)),
11311 build_int_cst (TREE_TYPE (arg0), 0));
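/* Illustrative example of the unsigned range test above (not from the
   original source), for unsigned x:

       x < (1u << y)    =>   (x >> y) == 0
       x >= (1u << y)   =>   (x >> y) != 0

   X is below 2**y exactly when all of its bits at position y and
   above are zero.  */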
11313 if ((code == LT_EXPR || code == GE_EXPR)
11314 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11315 && (TREE_CODE (arg1) == NOP_EXPR
11316 || TREE_CODE (arg1) == CONVERT_EXPR)
11317 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11318 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11320 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11321 fold_convert (TREE_TYPE (arg0),
11322 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11323 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11325 build_int_cst (TREE_TYPE (arg0), 0));
11329 case UNORDERED_EXPR:
11337 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11339 t1 = fold_relational_const (code, type, arg0, arg1);
11340 if (t1 != NULL_TREE)
11344 /* If the first operand is NaN, the result is constant. */
11345 if (TREE_CODE (arg0) == REAL_CST
11346 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11347 && (code != LTGT_EXPR || ! flag_trapping_math))
11349 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11350 ? integer_zero_node
11351 : integer_one_node;
11352 return omit_one_operand (type, t1, arg1);
11355 /* If the second operand is NaN, the result is constant. */
11356 if (TREE_CODE (arg1) == REAL_CST
11357 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11358 && (code != LTGT_EXPR || ! flag_trapping_math))
11360 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11361 ? integer_zero_node
11362 : integer_one_node;
11363 return omit_one_operand (type, t1, arg0);
11366 /* Simplify unordered comparison of something with itself. */
11367 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11368 && operand_equal_p (arg0, arg1, 0))
11369 return constant_boolean_node (1, type);
11371 if (code == LTGT_EXPR
11372 && !flag_trapping_math
11373 && operand_equal_p (arg0, arg1, 0))
11374 return constant_boolean_node (0, type);
11376 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11378 tree targ0 = strip_float_extensions (arg0);
11379 tree targ1 = strip_float_extensions (arg1);
11380 tree newtype = TREE_TYPE (targ0);
11382 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11383 newtype = TREE_TYPE (targ1);
11385 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11386 return fold_build2 (code, type, fold_convert (newtype, targ0),
11387 fold_convert (newtype, targ1));
11392 case COMPOUND_EXPR:
11393 /* When pedantic, a compound expression can be neither an lvalue
11394 nor an integer constant expression. */
11395 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11397 /* Don't let (0, 0) be a null pointer constant.  */
11398 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11399 : fold_convert (type, arg1);
11400 return pedantic_non_lvalue (tem);
11403 if ((TREE_CODE (arg0) == REAL_CST
11404 && TREE_CODE (arg1) == REAL_CST)
11405 || (TREE_CODE (arg0) == INTEGER_CST
11406 && TREE_CODE (arg1) == INTEGER_CST))
11407 return build_complex (type, arg0, arg1);
11411 /* An ASSERT_EXPR should never be passed to fold_binary. */
11412 gcc_unreachable ();
11416 } /* switch (code) */
11419 /* Callback for walk_tree, looking for LABEL_EXPR.
11420 Returns *TP if it is a LABEL_EXPR, otherwise NULL_TREE.
11421 Do not check the sub-tree of GOTO_EXPR. */
11424 contains_label_1 (tree *tp,
11425 int *walk_subtrees,
11426 void *data ATTRIBUTE_UNUSED)
11428 switch (TREE_CODE (*tp))
11433 *walk_subtrees = 0;
11440 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11441 accessible from outside the sub-tree. Returns NULL_TREE if no
11442 addressable label is found. */
11445 contains_label_p (tree st)
11447 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11450 /* Fold a ternary expression of code CODE and type TYPE with operands
11451 OP0, OP1, and OP2. Return the folded expression if folding is
11452 successful. Otherwise, return NULL_TREE. */
11455 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11458 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11459 enum tree_code_class kind = TREE_CODE_CLASS (code);
11461 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11462 && TREE_CODE_LENGTH (code) == 3);
11464 /* Strip any conversions that don't change the mode. This is safe
11465 for every expression, except for a comparison expression because
11466 its signedness is derived from its operands. So, in the latter
11467 case, only strip conversions that don't change the signedness.
11469 Note that this is done as an internal manipulation within the
11470 constant folder, in order to find the simplest representation of
11471 the arguments so that their form can be studied. In any cases,
11472 the appropriate type conversions should be put back in the tree
11473 that will get out of the constant folder. */
11488 case COMPONENT_REF:
11489 if (TREE_CODE (arg0) == CONSTRUCTOR
11490 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11492 unsigned HOST_WIDE_INT idx;
11494 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11501 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11502 so all simple results must be passed through pedantic_non_lvalue. */
11503 if (TREE_CODE (arg0) == INTEGER_CST)
11505 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11506 tem = integer_zerop (arg0) ? op2 : op1;
11507 /* Only optimize constant conditions when the selected branch
11508 has the same type as the COND_EXPR. This avoids optimizing
11509 away "c ? x : throw", where the throw has a void type.
11510 Avoid throwing away that operand which contains a label.  */
11511 if ((!TREE_SIDE_EFFECTS (unused_op)
11512 || !contains_label_p (unused_op))
11513 && (! VOID_TYPE_P (TREE_TYPE (tem))
11514 || VOID_TYPE_P (type)))
11515 return pedantic_non_lvalue (tem);
11518 if (operand_equal_p (arg1, op2, 0))
11519 return pedantic_omit_one_operand (type, arg1, arg0);
11521 /* If we have A op B ? A : C, we may be able to convert this to a
11522 simpler expression, depending on the operation and the values
11523 of B and C. Signed zeros prevent all of these transformations,
11524 for reasons given above each one.
11526 Also try swapping the arguments and inverting the conditional. */
11527 if (COMPARISON_CLASS_P (arg0)
11528 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11529 arg1, TREE_OPERAND (arg0, 1))
11530 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11532 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11537 if (COMPARISON_CLASS_P (arg0)
11538 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11540 TREE_OPERAND (arg0, 1))
11541 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11543 tem = fold_truth_not_expr (arg0);
11544 if (tem && COMPARISON_CLASS_P (tem))
11546 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11552 /* If the second operand is simpler than the third, swap them
11553 since that produces better jump optimization results. */
11554 if (truth_value_p (TREE_CODE (arg0))
11555 && tree_swap_operands_p (op1, op2, false))
11557 /* See if this can be inverted. If it can't, possibly because
11558 it was a floating-point inequality comparison, don't do anything.  */
11560 tem = fold_truth_not_expr (arg0);
11562 return fold_build3 (code, type, tem, op2, op1);
11565 /* Convert A ? 1 : 0 to simply A. */
11566 if (integer_onep (op1)
11567 && integer_zerop (op2)
11568 /* If we try to convert OP0 to our type, the
11569 call to fold will try to move the conversion inside
11570 a COND, which will recurse. In that case, the COND_EXPR
11571 is probably the best choice, so leave it alone. */
11572 && type == TREE_TYPE (arg0))
11573 return pedantic_non_lvalue (arg0);
11575 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11576 over COND_EXPR in cases such as floating point comparisons. */
11577 if (integer_zerop (op1)
11578 && integer_onep (op2)
11579 && truth_value_p (TREE_CODE (arg0)))
11580 return pedantic_non_lvalue (fold_convert (type,
11581 invert_truthvalue (arg0)));
11583 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11584 if (TREE_CODE (arg0) == LT_EXPR
11585 && integer_zerop (TREE_OPERAND (arg0, 1))
11586 && integer_zerop (op2)
11587 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11589 /* sign_bit_p only checks ARG1 bits within A's precision.
11590 If <sign bit of A> has wider type than A, bits outside
11591 of A's precision in <sign bit of A> need to be checked.
11592 If they are all 0, this optimization needs to be done
11593 in unsigned A's type; if they are all 1, in signed A's type;
11594 otherwise this can't be done.  */
11595 if (TYPE_PRECISION (TREE_TYPE (tem))
11596 < TYPE_PRECISION (TREE_TYPE (arg1))
11597 && TYPE_PRECISION (TREE_TYPE (tem))
11598 < TYPE_PRECISION (type))
11600 unsigned HOST_WIDE_INT mask_lo;
11601 HOST_WIDE_INT mask_hi;
11602 int inner_width, outer_width;
11605 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11606 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11607 if (outer_width > TYPE_PRECISION (type))
11608 outer_width = TYPE_PRECISION (type);
11610 if (outer_width > HOST_BITS_PER_WIDE_INT)
11612 mask_hi = ((unsigned HOST_WIDE_INT) -1
11613 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11619 mask_lo = ((unsigned HOST_WIDE_INT) -1
11620 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11622 if (inner_width > HOST_BITS_PER_WIDE_INT)
11624 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11625 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11629 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11630 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11632 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11633 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11635 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11636 tem = fold_convert (tem_type, tem);
11638 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11639 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11641 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11642 tem = fold_convert (tem_type, tem);
11649 return fold_convert (type,
11650 fold_build2 (BIT_AND_EXPR,
11651 TREE_TYPE (tem), tem,
11652 fold_convert (TREE_TYPE (tem),
11656 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11657 already handled above. */
11658 if (TREE_CODE (arg0) == BIT_AND_EXPR
11659 && integer_onep (TREE_OPERAND (arg0, 1))
11660 && integer_zerop (op2)
11661 && integer_pow2p (arg1))
11663 tree tem = TREE_OPERAND (arg0, 0);
11665 if (TREE_CODE (tem) == RSHIFT_EXPR
11666 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11667 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11668 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11669 return fold_build2 (BIT_AND_EXPR, type,
11670 TREE_OPERAND (tem, 0), arg1);
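/* Illustrative example of the bit-select rewrite above (not from the
   original source):

       ((a >> 3) & 1) ? 8 : 0   =>   a & 8

   Both the shift count and the selected constant must be INTEGER_CSTs,
   related by 8 == 1 << 3.  */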
11673 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11674 is probably obsolete because the first operand should be a
11675 truth value (that's why we have the two cases above), but let's
11676 leave it in until we can confirm this for all front-ends. */
11677 if (integer_zerop (op2)
11678 && TREE_CODE (arg0) == NE_EXPR
11679 && integer_zerop (TREE_OPERAND (arg0, 1))
11680 && integer_pow2p (arg1)
11681 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11682 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11683 arg1, OEP_ONLY_CONST))
11684 return pedantic_non_lvalue (fold_convert (type,
11685 TREE_OPERAND (arg0, 0)));
11687 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11688 if (integer_zerop (op2)
11689 && truth_value_p (TREE_CODE (arg0))
11690 && truth_value_p (TREE_CODE (arg1)))
11691 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11692 fold_convert (type, arg0),
11695 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11696 if (integer_onep (op2)
11697 && truth_value_p (TREE_CODE (arg0))
11698 && truth_value_p (TREE_CODE (arg1)))
11700 /* Only perform transformation if ARG0 is easily inverted. */
11701 tem = fold_truth_not_expr (arg0);
11703 return fold_build2 (TRUTH_ORIF_EXPR, type,
11704 fold_convert (type, tem),
11708 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11709 if (integer_zerop (arg1)
11710 && truth_value_p (TREE_CODE (arg0))
11711 && truth_value_p (TREE_CODE (op2)))
11713 /* Only perform the transformation if ARG0 is easily inverted. */
11714 tem = fold_truth_not_expr (arg0);
11716 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11717 fold_convert (type, tem),
11721 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11722 if (integer_onep (arg1)
11723 && truth_value_p (TREE_CODE (arg0))
11724 && truth_value_p (TREE_CODE (op2)))
11725 return fold_build2 (TRUTH_ORIF_EXPR, type,
11726 fold_convert (type, arg0),
11732 /* Check for a built-in function. */
11733 if (TREE_CODE (op0) == ADDR_EXPR
11734 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11735 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11736 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11739 case BIT_FIELD_REF:
11740 if (TREE_CODE (arg0) == VECTOR_CST
11741 && type == TREE_TYPE (TREE_TYPE (arg0))
11742 && host_integerp (arg1, 1)
11743 && host_integerp (op2, 1))
11745 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11746 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11749 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11750 && (idx % width) == 0
11751 && (idx = idx / width)
11752 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11754 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11755 while (idx-- > 0 && elements)
11756 elements = TREE_CHAIN (elements);
11758 return TREE_VALUE (elements);
11760 return fold_convert (type, integer_zero_node);
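/* For instance, on a V4SI constant {1, 2, 3, 4}, a BIT_FIELD_REF of
   width 32 at bit offset 64 selects element 2 and folds to the
   constant 3. */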
11767 } /* switch (code) */
11770 /* Perform constant folding and related simplification of EXPR.
11771 The related simplifications include x*1 => x, x*0 => 0, etc.,
11772 and application of the associative law.
11773 NOP_EXPR conversions may be removed freely (as long as we
11774 are careful not to change the type of the overall expression).
11775 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11776 but we can constant-fold them if they have constant operands. */
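/* A minimal usage sketch (not compiled in, and not part of this file's
   interface): a caller that builds trees through fold_build2 gets this
   folding for free.  build_int_cst and integer_type_node are the
   standard constructors and type nodes from tree.h.  */
#if 0
static tree
fold_example_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Folded at build time to the INTEGER_CST 5; no PLUS_EXPR node
     is created.  */
  return fold_build2 (PLUS_EXPR, integer_type_node, two, three);
}
#endif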
11778 #ifdef ENABLE_FOLD_CHECKING
11779 # define fold(x) fold_1 (x)
11780 static tree fold_1 (tree);
11786 const tree t = expr;
11787 enum tree_code code = TREE_CODE (t);
11788 enum tree_code_class kind = TREE_CODE_CLASS (code);
11791 /* Return right away if a constant. */
11792 if (kind == tcc_constant)
11795 if (IS_EXPR_CODE_CLASS (kind))
11797 tree type = TREE_TYPE (t);
11798 tree op0, op1, op2;
11800 switch (TREE_CODE_LENGTH (code))
11803 op0 = TREE_OPERAND (t, 0);
11804 tem = fold_unary (code, type, op0);
11805 return tem ? tem : expr;
11807 op0 = TREE_OPERAND (t, 0);
11808 op1 = TREE_OPERAND (t, 1);
11809 tem = fold_binary (code, type, op0, op1);
11810 return tem ? tem : expr;
11812 op0 = TREE_OPERAND (t, 0);
11813 op1 = TREE_OPERAND (t, 1);
11814 op2 = TREE_OPERAND (t, 2);
11815 tem = fold_ternary (code, type, op0, op1, op2);
11816 return tem ? tem : expr;
11825 return fold (DECL_INITIAL (t));
11829 } /* switch (code) */
11832 #ifdef ENABLE_FOLD_CHECKING
11835 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11836 static void fold_check_failed (tree, tree);
11837 void print_fold_checksum (tree);
11839 /* When --enable-checking=fold is used, compute a digest of EXPR before
11840 and after the actual fold call, to verify that fold did not
11841 accidentally change the original expr. */
11847 struct md5_ctx ctx;
11848 unsigned char checksum_before[16], checksum_after[16];
11851 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11852 md5_init_ctx (&ctx);
11853 fold_checksum_tree (expr, &ctx, ht);
11854 md5_finish_ctx (&ctx, checksum_before);
11857 ret = fold_1 (expr);
11859 md5_init_ctx (&ctx);
11860 fold_checksum_tree (expr, &ctx, ht);
11861 md5_finish_ctx (&ctx, checksum_after);
11864 if (memcmp (checksum_before, checksum_after, 16))
11865 fold_check_failed (expr, ret);
11871 print_fold_checksum (tree expr)
11873 struct md5_ctx ctx;
11874 unsigned char checksum[16], cnt;
11877 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11878 md5_init_ctx (&ctx);
11879 fold_checksum_tree (expr, &ctx, ht);
11880 md5_finish_ctx (&ctx, checksum);
11882 for (cnt = 0; cnt < 16; ++cnt)
11883 fprintf (stderr, "%02x", checksum[cnt]);
11884 putc ('\n', stderr);
11888 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11890 internal_error ("fold check: original tree changed by fold");
11894 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11897 enum tree_code code;
11898 struct tree_function_decl buf;
11903 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11904 <= sizeof (struct tree_function_decl))
11905 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11908 slot = htab_find_slot (ht, expr, INSERT);
11912 code = TREE_CODE (expr);
11913 if (TREE_CODE_CLASS (code) == tcc_declaration
11914 && DECL_ASSEMBLER_NAME_SET_P (expr))
11916 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11917 memcpy ((char *) &buf, expr, tree_size (expr));
11918 expr = (tree) &buf;
11919 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11921 else if (TREE_CODE_CLASS (code) == tcc_type
11922 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11923 || TYPE_CACHED_VALUES_P (expr)
11924 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11926 /* Allow these fields to be modified. */
11927 memcpy ((char *) &buf, expr, tree_size (expr));
11928 expr = (tree) &buf;
11929 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11930 TYPE_POINTER_TO (expr) = NULL;
11931 TYPE_REFERENCE_TO (expr) = NULL;
11932 if (TYPE_CACHED_VALUES_P (expr))
11934 TYPE_CACHED_VALUES_P (expr) = 0;
11935 TYPE_CACHED_VALUES (expr) = NULL;
11938 md5_process_bytes (expr, tree_size (expr), ctx);
11939 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11940 if (TREE_CODE_CLASS (code) != tcc_type
11941 && TREE_CODE_CLASS (code) != tcc_declaration
11942 && code != TREE_LIST)
11943 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11944 switch (TREE_CODE_CLASS (code))
11950 md5_process_bytes (TREE_STRING_POINTER (expr),
11951 TREE_STRING_LENGTH (expr), ctx);
11954 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11955 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11958 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11964 case tcc_exceptional:
11968 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11969 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11970 expr = TREE_CHAIN (expr);
11971 goto recursive_label;
11974 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11975 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11981 case tcc_expression:
11982 case tcc_reference:
11983 case tcc_comparison:
11986 case tcc_statement:
11987 len = TREE_CODE_LENGTH (code);
11988 for (i = 0; i < len; ++i)
11989 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11991 case tcc_declaration:
11992 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11993 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11994 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11996 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11997 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11998 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11999 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12000 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12002 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12003 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12005 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12007 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12008 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12009 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12013 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12014 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12015 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12016 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12017 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12018 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12019 if (INTEGRAL_TYPE_P (expr)
12020 || SCALAR_FLOAT_TYPE_P (expr))
12022 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12023 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12025 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12026 if (TREE_CODE (expr) == RECORD_TYPE
12027 || TREE_CODE (expr) == UNION_TYPE
12028 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12029 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12030 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12039 /* Fold a unary tree expression with code CODE of type TYPE with an
12040 operand OP0. Return a folded expression if successful. Otherwise,
12041 return a tree expression with code CODE of type TYPE with an
12045 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12048 #ifdef ENABLE_FOLD_CHECKING
12049 unsigned char checksum_before[16], checksum_after[16];
12050 struct md5_ctx ctx;
12053 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12054 md5_init_ctx (&ctx);
12055 fold_checksum_tree (op0, &ctx, ht);
12056 md5_finish_ctx (&ctx, checksum_before);
12060 tem = fold_unary (code, type, op0);
12062 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12064 #ifdef ENABLE_FOLD_CHECKING
12065 md5_init_ctx (&ctx);
12066 fold_checksum_tree (op0, &ctx, ht);
12067 md5_finish_ctx (&ctx, checksum_after);
12070 if (memcmp (checksum_before, checksum_after, 16))
12071 fold_check_failed (op0, tem);
12076 /* Fold a binary tree expression with code CODE of type TYPE with
12077 operands OP0 and OP1. Return a folded expression if successful.
12078 Otherwise, return a tree expression with code CODE of type TYPE
12079 with operands OP0 and OP1. */
12082 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12086 #ifdef ENABLE_FOLD_CHECKING
12087 unsigned char checksum_before_op0[16],
12088 checksum_before_op1[16],
12089 checksum_after_op0[16],
12090 checksum_after_op1[16];
12091 struct md5_ctx ctx;
12094 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12095 md5_init_ctx (&ctx);
12096 fold_checksum_tree (op0, &ctx, ht);
12097 md5_finish_ctx (&ctx, checksum_before_op0);
12100 md5_init_ctx (&ctx);
12101 fold_checksum_tree (op1, &ctx, ht);
12102 md5_finish_ctx (&ctx, checksum_before_op1);
12106 tem = fold_binary (code, type, op0, op1);
12108 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12110 #ifdef ENABLE_FOLD_CHECKING
12111 md5_init_ctx (&ctx);
12112 fold_checksum_tree (op0, &ctx, ht);
12113 md5_finish_ctx (&ctx, checksum_after_op0);
12116 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12117 fold_check_failed (op0, tem);
12119 md5_init_ctx (&ctx);
12120 fold_checksum_tree (op1, &ctx, ht);
12121 md5_finish_ctx (&ctx, checksum_after_op1);
12124 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12125 fold_check_failed (op1, tem);
12130 /* Fold a ternary tree expression with code CODE of type TYPE with
12131 operands OP0, OP1, and OP2. Return a folded expression if
12132 successful. Otherwise, return a tree expression with code CODE of
12133 type TYPE with operands OP0, OP1, and OP2. */
12136 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12140 #ifdef ENABLE_FOLD_CHECKING
12141 unsigned char checksum_before_op0[16],
12142 checksum_before_op1[16],
12143 checksum_before_op2[16],
12144 checksum_after_op0[16],
12145 checksum_after_op1[16],
12146 checksum_after_op2[16];
12147 struct md5_ctx ctx;
12150 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12151 md5_init_ctx (&ctx);
12152 fold_checksum_tree (op0, &ctx, ht);
12153 md5_finish_ctx (&ctx, checksum_before_op0);
12156 md5_init_ctx (&ctx);
12157 fold_checksum_tree (op1, &ctx, ht);
12158 md5_finish_ctx (&ctx, checksum_before_op1);
12161 md5_init_ctx (&ctx);
12162 fold_checksum_tree (op2, &ctx, ht);
12163 md5_finish_ctx (&ctx, checksum_before_op2);
12167 tem = fold_ternary (code, type, op0, op1, op2);
12169 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12171 #ifdef ENABLE_FOLD_CHECKING
12172 md5_init_ctx (&ctx);
12173 fold_checksum_tree (op0, &ctx, ht);
12174 md5_finish_ctx (&ctx, checksum_after_op0);
12177 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12178 fold_check_failed (op0, tem);
12180 md5_init_ctx (&ctx);
12181 fold_checksum_tree (op1, &ctx, ht);
12182 md5_finish_ctx (&ctx, checksum_after_op1);
12185 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12186 fold_check_failed (op1, tem);
12188 md5_init_ctx (&ctx);
12189 fold_checksum_tree (op2, &ctx, ht);
12190 md5_finish_ctx (&ctx, checksum_after_op2);
12193 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12194 fold_check_failed (op2, tem);
12199 /* Perform constant folding and related simplification of initializer
12200 expression EXPR. These behave identically to "fold_buildN" but ignore
12201 potential run-time traps and exceptions that fold must preserve. */
12203 #define START_FOLD_INIT \
12204 int saved_signaling_nans = flag_signaling_nans;\
12205 int saved_trapping_math = flag_trapping_math;\
12206 int saved_rounding_math = flag_rounding_math;\
12207 int saved_trapv = flag_trapv;\
12208 int saved_folding_initializer = folding_initializer;\
12209 flag_signaling_nans = 0;\
12210 flag_trapping_math = 0;\
12211 flag_rounding_math = 0;\
12213 folding_initializer = 1;
12215 #define END_FOLD_INIT \
12216 flag_signaling_nans = saved_signaling_nans;\
12217 flag_trapping_math = saved_trapping_math;\
12218 flag_rounding_math = saved_rounding_math;\
12219 flag_trapv = saved_trapv;\
12220 folding_initializer = saved_folding_initializer;
12223 fold_build1_initializer (enum tree_code code, tree type, tree op)
12228 result = fold_build1 (code, type, op);
12235 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12240 result = fold_build2 (code, type, op0, op1);
12247 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12253 result = fold_build3 (code, type, op0, op1, op2);
12259 #undef START_FOLD_INIT
12260 #undef END_FOLD_INIT
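/* A minimal sketch (not compiled in) of what the _initializer variants
   buy us: with -frounding-math, fold must leave an inexact floating
   point division to run time, but a static initializer must be
   evaluated at compile time, so the flags are suppressed around the
   fold call.  Here X and Y stand for arbitrary REAL_CST operands.  */
#if 0
  tree run_time  = fold_build2 (RDIV_EXPR, double_type_node, x, y);
  tree init_time = fold_build2_initializer (RDIV_EXPR, double_type_node, x, y);
#endif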
12262 /* Determine if the first argument is a multiple of the second argument. Return 0 if
12263 it is not, or if we cannot easily determine it to be.
12265 An example of the sort of thing we care about (at this point; this routine
12266 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12267 fold cases do now) is discovering that
12269 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12275 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12277 This code also handles discovering that
12279 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12281 is a multiple of 8 so we don't have to worry about dealing with a
12282 possible remainder.
12284 Note that we *look* inside a SAVE_EXPR only to determine how it was
12285 calculated; it is not safe for fold to do much of anything else with the
12286 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12287 at run time. For example, the latter example above *cannot* be implemented
12288 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12289 evaluation time of the original SAVE_EXPR is not necessarily the same at
12290 the time the new expression is evaluated. The only optimization of this
12291 sort that would be valid is changing
12293 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12297 SAVE_EXPR (I) * SAVE_EXPR (J)
12299 (where the same SAVE_EXPR (J) is used in the original and the
12300 transformed version). */
12303 multiple_of_p (tree type, tree top, tree bottom)
12305 if (operand_equal_p (top, bottom, 0))
12308 if (TREE_CODE (type) != INTEGER_TYPE)
12311 switch (TREE_CODE (top))
12314 /* Bitwise AND provides a power of two multiple. If the mask is
12315 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12316 if (!integer_pow2p (bottom))
12321 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12322 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12326 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12327 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12330 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12334 op1 = TREE_OPERAND (top, 1);
12335 /* const_binop may not detect overflow correctly,
12336 so check for it explicitly here. */
12337 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12338 > TREE_INT_CST_LOW (op1)
12339 && TREE_INT_CST_HIGH (op1) == 0
12340 && 0 != (t1 = fold_convert (type,
12341 const_binop (LSHIFT_EXPR,
12344 && ! TREE_OVERFLOW (t1))
12345 return multiple_of_p (type, t1, bottom);
12350 /* Can't handle conversions from non-integral or wider integral type. */
12351 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12352 || (TYPE_PRECISION (type)
12353 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12356 /* ... fall through ... */
12359 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12362 if (TREE_CODE (bottom) != INTEGER_CST
12363 || (TYPE_UNSIGNED (type)
12364 && (tree_int_cst_sgn (top) < 0
12365 || tree_int_cst_sgn (bottom) < 0)))
12367 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
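/* A minimal sketch (not compiled in): on INTEGER_CST arguments the
   function reduces to the TRUNC_MOD_EXPR test above.  */
#if 0
  tree top = build_int_cst (sizetype, 24);
  tree bottom = build_int_cst (sizetype, 8);
  int ok = multiple_of_p (sizetype, top, bottom);  /* 1, since 24 % 8 == 0.  */
#endif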
12375 /* Return true if `t' is known to be non-negative. If the return
12376 value is based on the assumption that signed overflow is undefined,
12377 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12378 *STRICT_OVERFLOW_P. */
12381 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12383 if (t == error_mark_node)
12386 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12389 switch (TREE_CODE (t))
12392 /* Query VRP to see if it has recorded any information about
12393 the range of this object. */
12394 return ssa_name_nonnegative_p (t);
12397 /* We can't return 1 if flag_wrapv is set because
12398 ABS_EXPR<INT_MIN> = INT_MIN. */
12399 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12401 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12403 *strict_overflow_p = true;
12409 return tree_int_cst_sgn (t) >= 0;
12412 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12415 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12416 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12418 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12419 strict_overflow_p));
12421 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12422 both unsigned and at least 2 bits shorter than the result. */
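/* For example, (int) us1 + (int) us2 with 16-bit unsigned shorts
   needs at most 17 bits, so with a 32-bit int the sum can never
   reach the sign bit. */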
12423 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12424 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12425 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12427 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12428 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12429 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12430 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12432 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12433 TYPE_PRECISION (inner2)) + 1;
12434 return prec < TYPE_PRECISION (TREE_TYPE (t));
12440 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12442 /* x * x for floating point x is always non-negative. */
12443 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12445 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12447 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12448 strict_overflow_p));
12451 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12452 both unsigned and their total width is less than that of the result. */
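/* For example, (int) uc1 * (int) uc2 with 8-bit unsigned chars
   needs at most 16 bits (255 * 255 = 65025), so with a 32-bit
   int the product is always non-negative. */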
12453 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12454 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12455 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12457 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12458 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12459 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12460 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12461 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12462 < TYPE_PRECISION (TREE_TYPE (t));
12468 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12470 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12471 strict_overflow_p));
12477 case TRUNC_DIV_EXPR:
12478 case CEIL_DIV_EXPR:
12479 case FLOOR_DIV_EXPR:
12480 case ROUND_DIV_EXPR:
12481 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12483 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12484 strict_overflow_p));
12486 case TRUNC_MOD_EXPR:
12487 case CEIL_MOD_EXPR:
12488 case FLOOR_MOD_EXPR:
12489 case ROUND_MOD_EXPR:
12491 case NON_LVALUE_EXPR:
12493 case FIX_TRUNC_EXPR:
12494 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12495 strict_overflow_p);
12497 case COMPOUND_EXPR:
12499 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12500 strict_overflow_p);
12503 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12504 strict_overflow_p);
12507 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12509 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12510 strict_overflow_p));
12514 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12515 tree outer_type = TREE_TYPE (t);
12517 if (TREE_CODE (outer_type) == REAL_TYPE)
12519 if (TREE_CODE (inner_type) == REAL_TYPE)
12520 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12521 strict_overflow_p);
12522 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12524 if (TYPE_UNSIGNED (inner_type))
12526 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12527 strict_overflow_p);
12530 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12532 if (TREE_CODE (inner_type) == REAL_TYPE)
12533 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12534 strict_overflow_p);
12535 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12536 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12537 && TYPE_UNSIGNED (inner_type);
12544 tree temp = TARGET_EXPR_SLOT (t);
12545 t = TARGET_EXPR_INITIAL (t);
12547 /* If the initializer is non-void, then it's a normal expression
12548 that will be assigned to the slot. */
12549 if (!VOID_TYPE_P (t))
12550 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12552 /* Otherwise, the initializer sets the slot in some way. One common
12553 way is an assignment statement at the end of the initializer. */
12556 if (TREE_CODE (t) == BIND_EXPR)
12557 t = expr_last (BIND_EXPR_BODY (t));
12558 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12559 || TREE_CODE (t) == TRY_CATCH_EXPR)
12560 t = expr_last (TREE_OPERAND (t, 0));
12561 else if (TREE_CODE (t) == STATEMENT_LIST)
12566 if (TREE_CODE (t) == MODIFY_EXPR
12567 && TREE_OPERAND (t, 0) == temp)
12568 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12569 strict_overflow_p);
12576 tree fndecl = get_callee_fndecl (t);
12577 tree arglist = TREE_OPERAND (t, 1);
12578 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12579 switch (DECL_FUNCTION_CODE (fndecl))
12581 CASE_FLT_FN (BUILT_IN_ACOS):
12582 CASE_FLT_FN (BUILT_IN_ACOSH):
12583 CASE_FLT_FN (BUILT_IN_CABS):
12584 CASE_FLT_FN (BUILT_IN_COSH):
12585 CASE_FLT_FN (BUILT_IN_ERFC):
12586 CASE_FLT_FN (BUILT_IN_EXP):
12587 CASE_FLT_FN (BUILT_IN_EXP10):
12588 CASE_FLT_FN (BUILT_IN_EXP2):
12589 CASE_FLT_FN (BUILT_IN_FABS):
12590 CASE_FLT_FN (BUILT_IN_FDIM):
12591 CASE_FLT_FN (BUILT_IN_HYPOT):
12592 CASE_FLT_FN (BUILT_IN_POW10):
12593 CASE_INT_FN (BUILT_IN_FFS):
12594 CASE_INT_FN (BUILT_IN_PARITY):
12595 CASE_INT_FN (BUILT_IN_POPCOUNT):
12596 case BUILT_IN_BSWAP32:
12597 case BUILT_IN_BSWAP64:
12601 CASE_FLT_FN (BUILT_IN_SQRT):
12602 /* sqrt(-0.0) is -0.0. */
12603 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12605 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12606 strict_overflow_p);
12608 CASE_FLT_FN (BUILT_IN_ASINH):
12609 CASE_FLT_FN (BUILT_IN_ATAN):
12610 CASE_FLT_FN (BUILT_IN_ATANH):
12611 CASE_FLT_FN (BUILT_IN_CBRT):
12612 CASE_FLT_FN (BUILT_IN_CEIL):
12613 CASE_FLT_FN (BUILT_IN_ERF):
12614 CASE_FLT_FN (BUILT_IN_EXPM1):
12615 CASE_FLT_FN (BUILT_IN_FLOOR):
12616 CASE_FLT_FN (BUILT_IN_FMOD):
12617 CASE_FLT_FN (BUILT_IN_FREXP):
12618 CASE_FLT_FN (BUILT_IN_LCEIL):
12619 CASE_FLT_FN (BUILT_IN_LDEXP):
12620 CASE_FLT_FN (BUILT_IN_LFLOOR):
12621 CASE_FLT_FN (BUILT_IN_LLCEIL):
12622 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12623 CASE_FLT_FN (BUILT_IN_LLRINT):
12624 CASE_FLT_FN (BUILT_IN_LLROUND):
12625 CASE_FLT_FN (BUILT_IN_LRINT):
12626 CASE_FLT_FN (BUILT_IN_LROUND):
12627 CASE_FLT_FN (BUILT_IN_MODF):
12628 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12629 CASE_FLT_FN (BUILT_IN_POW):
12630 CASE_FLT_FN (BUILT_IN_RINT):
12631 CASE_FLT_FN (BUILT_IN_ROUND):
12632 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12633 CASE_FLT_FN (BUILT_IN_SINH):
12634 CASE_FLT_FN (BUILT_IN_TANH):
12635 CASE_FLT_FN (BUILT_IN_TRUNC):
12636 /* True if the 1st argument is nonnegative. */
12637 return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12638 strict_overflow_p);
12640 CASE_FLT_FN (BUILT_IN_FMAX):
12641 /* True if the 1st OR 2nd arguments are nonnegative. */
12642 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12644 || (tree_expr_nonnegative_warnv_p
12645 (TREE_VALUE (TREE_CHAIN (arglist)),
12646 strict_overflow_p)));
12648 CASE_FLT_FN (BUILT_IN_FMIN):
12649 /* True if the 1st AND 2nd arguments are nonnegative. */
12650 return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12652 && (tree_expr_nonnegative_warnv_p
12653 (TREE_VALUE (TREE_CHAIN (arglist)),
12654 strict_overflow_p)));
12656 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12657 /* True if the 2nd argument is nonnegative. */
12658 return (tree_expr_nonnegative_warnv_p
12659 (TREE_VALUE (TREE_CHAIN (arglist)),
12660 strict_overflow_p));
12667 /* ... fall through ... */
12671 tree type = TREE_TYPE (t);
12672 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12673 && truth_value_p (TREE_CODE (t)))
12674 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12675 have a signed:1 type (where the values are -1 and 0). */
12680 /* We don't know sign of `t', so be conservative and return false. */
12684 /* Return true if `t' is known to be non-negative. Handle warnings
12685 about undefined signed overflow. */
12688 tree_expr_nonnegative_p (tree t)
12691 bool strict_overflow_p;
12693 strict_overflow_p = false;
12694 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12695 if (strict_overflow_p)
12696 fold_overflow_warning (("assuming signed overflow does not occur when "
12697 "determining that expression is always "
12699 WARN_STRICT_OVERFLOW_MISC);
12703 /* Return true when T is an address and is known to be nonzero.
12704 For floating point we further ensure that T is not denormal.
12705 Similar logic is present in nonzero_address in rtlanal.c.
12707 If the return value is based on the assumption that signed overflow
12708 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12709 change *STRICT_OVERFLOW_P. */
12712 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12714 tree type = TREE_TYPE (t);
12715 bool sub_strict_overflow_p;
12717 /* Doing something useful for floating point would need more work. */
12718 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12721 switch (TREE_CODE (t))
12724 /* Query VRP to see if it has recorded any information about
12725 the range of this object. */
12726 return ssa_name_nonzero_p (t);
12729 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12730 strict_overflow_p);
12733 /* We used to test for !integer_zerop here. This does not work correctly
12734 if TREE_CONSTANT_OVERFLOW (t). */
12735 return (TREE_INT_CST_LOW (t) != 0
12736 || TREE_INT_CST_HIGH (t) != 0);
12739 if (TYPE_OVERFLOW_UNDEFINED (type))
12741 /* With the presence of negative values it is hard
12742 to say something. */
12743 sub_strict_overflow_p = false;
12744 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12745 &sub_strict_overflow_p)
12746 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12747 &sub_strict_overflow_p))
12749 /* One of the operands must be positive and the other non-negative. */
12750 /* We don't set *STRICT_OVERFLOW_P here: even if this value
12751 overflows, on a twos-complement machine the sum of two
12752 nonnegative numbers can never be zero. */
12753 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12755 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12756 strict_overflow_p));
12761 if (TYPE_OVERFLOW_UNDEFINED (type))
12763 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12765 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12766 strict_overflow_p))
12768 *strict_overflow_p = true;
12776 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12777 tree outer_type = TREE_TYPE (t);
12779 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12780 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12781 strict_overflow_p));
12787 tree base = get_base_address (TREE_OPERAND (t, 0));
12792 /* Weak declarations may link to NULL. */
12793 if (VAR_OR_FUNCTION_DECL_P (base))
12794 return !DECL_WEAK (base);
12796 /* Constants are never weak. */
12797 if (CONSTANT_CLASS_P (base))
12804 sub_strict_overflow_p = false;
12805 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12806 &sub_strict_overflow_p)
12807 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12808 &sub_strict_overflow_p))
12810 if (sub_strict_overflow_p)
12811 *strict_overflow_p = true;
12817 sub_strict_overflow_p = false;
12818 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12819 &sub_strict_overflow_p)
12820 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12821 &sub_strict_overflow_p))
12823 if (sub_strict_overflow_p)
12824 *strict_overflow_p = true;
12829 sub_strict_overflow_p = false;
12830 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12831 &sub_strict_overflow_p))
12833 if (sub_strict_overflow_p)
12834 *strict_overflow_p = true;
12836 /* When both operands are nonzero, then MAX must be too. */
12837 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12838 strict_overflow_p))
12841 /* MAX where operand 0 is positive is positive. */
12842 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12843 strict_overflow_p);
12845 /* MAX where operand 1 is positive is positive. */
12846 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12847 &sub_strict_overflow_p)
12848 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12849 &sub_strict_overflow_p))
12851 if (sub_strict_overflow_p)
12852 *strict_overflow_p = true;
12857 case COMPOUND_EXPR:
12860 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12861 strict_overflow_p);
12864 case NON_LVALUE_EXPR:
12865 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12866 strict_overflow_p);
12869 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12871 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12872 strict_overflow_p));
12875 return alloca_call_p (t);
12883 /* Return true when T is an address and is known to be nonzero.
12884 Handle warnings about undefined signed overflow. */
12887 tree_expr_nonzero_p (tree t)
12889 bool ret, strict_overflow_p;
12891 strict_overflow_p = false;
12892 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12893 if (strict_overflow_p)
12894 fold_overflow_warning (("assuming signed overflow does not occur when "
12895 "determining that expression is always "
12897 WARN_STRICT_OVERFLOW_MISC);
12901 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12902 attempt to fold the expression to a constant without modifying TYPE,
12905 If the expression could be simplified to a constant, then return
12906 the constant. If the expression would not be simplified to a
12907 constant, then return NULL_TREE. */
12910 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12912 tree tem = fold_binary (code, type, op0, op1);
12913 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12916 /* Given the components of a unary expression CODE, TYPE and OP0,
12917 attempt to fold the expression to a constant without modifying
12920 If the expression could be simplified to a constant, then return
12921 the constant. If the expression would not be simplified to a
12922 constant, then return NULL_TREE. */
12925 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12927 tree tem = fold_unary (code, type, op0);
12928 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12931 /* If EXP represents referencing an element in a constant string
12932 (either via pointer arithmetic or array indexing), return the
12933 tree representing the value accessed, otherwise return NULL. */
12936 fold_read_from_constant_string (tree exp)
12938 if ((TREE_CODE (exp) == INDIRECT_REF
12939 || TREE_CODE (exp) == ARRAY_REF)
12940 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12942 tree exp1 = TREE_OPERAND (exp, 0);
12946 if (TREE_CODE (exp) == INDIRECT_REF)
12947 string = string_constant (exp1, &index);
12950 tree low_bound = array_ref_low_bound (exp);
12951 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12953 /* Optimize the special case of a zero lower bound.
12955 We convert the low_bound to sizetype to avoid some problems
12956 with constant folding. (E.g. suppose the lower bound is 1,
12957 and its mode is QI. Without the conversion, (ARRAY
12958 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12959 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
12960 if (! integer_zerop (low_bound))
12961 index = size_diffop (index, fold_convert (sizetype, low_bound));
12967 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12968 && TREE_CODE (string) == STRING_CST
12969 && TREE_CODE (index) == INTEGER_CST
12970 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12971 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12973 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12974 return fold_convert (TREE_TYPE (exp),
12975 build_int_cst (NULL_TREE,
12976 (TREE_STRING_POINTER (string)
12977 [TREE_INT_CST_LOW (index)])));
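/* For instance, "abc"[1] (an ARRAY_REF into a STRING_CST) folds to
   the character constant 'b'. */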
12982 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12983 an integer constant or real constant.
12985 TYPE is the type of the result. */
12988 fold_negate_const (tree arg0, tree type)
12990 tree t = NULL_TREE;
12992 switch (TREE_CODE (arg0))
12996 unsigned HOST_WIDE_INT low;
12997 HOST_WIDE_INT high;
12998 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12999 TREE_INT_CST_HIGH (arg0),
13001 t = build_int_cst_wide (type, low, high);
13002 t = force_fit_type (t, 1,
13003 (overflow | TREE_OVERFLOW (arg0))
13004 && !TYPE_UNSIGNED (type),
13005 TREE_CONSTANT_OVERFLOW (arg0));
13010 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13014 gcc_unreachable ();
13020 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13021 an integer constant or real constant.
13023 TYPE is the type of the result. */
13026 fold_abs_const (tree arg0, tree type)
13028 tree t = NULL_TREE;
13030 switch (TREE_CODE (arg0))
13033 /* If the value is unsigned, then the absolute value is
13034 the same as the ordinary value. */
13035 if (TYPE_UNSIGNED (type))
13037 /* Similarly, if the value is non-negative. */
13038 else if (INT_CST_LT (integer_minus_one_node, arg0))
13040 /* If the value is negative, then the absolute value is
13044 unsigned HOST_WIDE_INT low;
13045 HOST_WIDE_INT high;
13046 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13047 TREE_INT_CST_HIGH (arg0),
13049 t = build_int_cst_wide (type, low, high);
13050 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13051 TREE_CONSTANT_OVERFLOW (arg0));
13056 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13057 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13063 gcc_unreachable ();
13069 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13070 constant. TYPE is the type of the result. */
13073 fold_not_const (tree arg0, tree type)
13075 tree t = NULL_TREE;
13077 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13079 t = build_int_cst_wide (type,
13080 ~ TREE_INT_CST_LOW (arg0),
13081 ~ TREE_INT_CST_HIGH (arg0));
13082 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13083 TREE_CONSTANT_OVERFLOW (arg0));
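/* For instance, ~0 of type int folds to the INTEGER_CST -1. */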
13088 /* Given CODE, a relational operator, the target type, TYPE and two
13089 constant operands OP0 and OP1, return the result of the
13090 relational operation. If the result is not a compile time
13091 constant, then return NULL_TREE. */
13094 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13096 int result, invert;
13098 /* From here on, the only cases we handle are when the result is
13099 known to be a constant. */
13101 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13103 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13104 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13106 /* Handle the cases where either operand is a NaN. */
13107 if (real_isnan (c0) || real_isnan (c1))
13117 case UNORDERED_EXPR:
13131 if (flag_trapping_math)
13137 gcc_unreachable ();
13140 return constant_boolean_node (result, type);
13143 return constant_boolean_node (real_compare (code, c0, c1), type);
13146 /* Handle equality/inequality of complex constants. */
13147 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13149 tree rcond = fold_relational_const (code, type,
13150 TREE_REALPART (op0),
13151 TREE_REALPART (op1));
13152 tree icond = fold_relational_const (code, type,
13153 TREE_IMAGPART (op0),
13154 TREE_IMAGPART (op1));
13155 if (code == EQ_EXPR)
13156 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13157 else if (code == NE_EXPR)
13158 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13163 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13165 To compute GT, swap the arguments and do LT.
13166 To compute GE, do LT and invert the result.
13167 To compute LE, swap the arguments, do LT and invert the result.
13168 To compute NE, do EQ and invert the result.
13170 Therefore, the code below must handle only EQ and LT. */
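/* For example, 3 > 2 is computed as 2 < 3, and 3 >= 2 as ! (3 < 2). */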
13172 if (code == LE_EXPR || code == GT_EXPR)
13177 code = swap_tree_comparison (code);
13180 /* Note that it is safe to invert for real values here because we
13181 have already handled the one case where it matters. */
13184 if (code == NE_EXPR || code == GE_EXPR)
13187 code = invert_tree_comparison (code, false);
13190 /* Compute a result for LT or EQ if args permit;
13191 otherwise return T. */
13192 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13194 if (code == EQ_EXPR)
13195 result = tree_int_cst_equal (op0, op1);
13196 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13197 result = INT_CST_LT_UNSIGNED (op0, op1);
13199 result = INT_CST_LT (op0, op1);
13206 return constant_boolean_node (result, type);
13209 /* Build an expression for a cleanup point containing EXPR with type TYPE.
13210 Don't build a CLEANUP_POINT_EXPR for an EXPR that doesn't have side
13214 fold_build_cleanup_point_expr (tree type, tree expr)
13216 /* If the expression does not have side effects then we don't have to wrap
13217 it with a cleanup point expression. */
13218 if (!TREE_SIDE_EFFECTS (expr))
13221 /* If the expression is a return, check whether the expression inside the
13222 return, or the right-hand side of the MODIFY_EXPR inside the return,
13223 has side effects. If either of them has none, we don't need to
13224 wrap the expression in a cleanup point expression. Note we don't check
13225 the left-hand side of the MODIFY_EXPR because it should always be the return decl. */
13226 if (TREE_CODE (expr) == RETURN_EXPR)
13228 tree op = TREE_OPERAND (expr, 0);
13229 if (!op || !TREE_SIDE_EFFECTS (op))
13231 op = TREE_OPERAND (op, 1);
13232 if (!TREE_SIDE_EFFECTS (op))
13236 return build1 (CLEANUP_POINT_EXPR, type, expr);
13239 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13240 avoid confusing the gimplify process. */
13243 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13245 /* The size of the object is not relevant when talking about its address. */
13246 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13247 t = TREE_OPERAND (t, 0);
13249 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13250 if (TREE_CODE (t) == INDIRECT_REF
13251 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13253 t = TREE_OPERAND (t, 0);
13254 if (TREE_TYPE (t) != ptrtype)
13255 t = build1 (NOP_EXPR, ptrtype, t);
13261 while (handled_component_p (base))
13262 base = TREE_OPERAND (base, 0);
13264 TREE_ADDRESSABLE (base) = 1;
13266 t = build1 (ADDR_EXPR, ptrtype, t);
13273 build_fold_addr_expr (tree t)
13275 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
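/* For instance, applying build_fold_addr_expr to the INDIRECT_REF *p
   yields p itself (with a NOP_EXPR if the pointer types differ) rather
   than the tree &*p. */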
13278 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13279 of an indirection through OP0, or NULL_TREE if no simplification is
13283 fold_indirect_ref_1 (tree type, tree op0)
13289 subtype = TREE_TYPE (sub);
13290 if (!POINTER_TYPE_P (subtype))
13293 if (TREE_CODE (sub) == ADDR_EXPR)
13295 tree op = TREE_OPERAND (sub, 0);
13296 tree optype = TREE_TYPE (op);
13297 /* *&CONST_DECL -> to the value of the const decl. */
13298 if (TREE_CODE (op) == CONST_DECL)
13299 return DECL_INITIAL (op);
13300 /* *&p => p; make sure to handle *&"str"[cst] here. */
13301 if (type == optype)
13303 tree fop = fold_read_from_constant_string (op);
13309 /* *(foo *)&fooarray => fooarray[0] */
13310 else if (TREE_CODE (optype) == ARRAY_TYPE
13311 && type == TREE_TYPE (optype))
13313 tree type_domain = TYPE_DOMAIN (optype);
13314 tree min_val = size_zero_node;
13315 if (type_domain && TYPE_MIN_VALUE (type_domain))
13316 min_val = TYPE_MIN_VALUE (type_domain);
13317 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13319 /* *(foo *)&complexfoo => __real__ complexfoo */
13320 else if (TREE_CODE (optype) == COMPLEX_TYPE
13321 && type == TREE_TYPE (optype))
13322 return fold_build1 (REALPART_EXPR, type, op);
13325 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13326 if (TREE_CODE (sub) == PLUS_EXPR
13327 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13329 tree op00 = TREE_OPERAND (sub, 0);
13330 tree op01 = TREE_OPERAND (sub, 1);
13334 op00type = TREE_TYPE (op00);
13335 if (TREE_CODE (op00) == ADDR_EXPR
13336 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13337 && type == TREE_TYPE (TREE_TYPE (op00type)))
13339 tree size = TYPE_SIZE_UNIT (type);
13340 if (tree_int_cst_equal (size, op01))
13341 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13345 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13346 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13347 && type == TREE_TYPE (TREE_TYPE (subtype)))
13350 tree min_val = size_zero_node;
13351 sub = build_fold_indirect_ref (sub);
13352 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13353 if (type_domain && TYPE_MIN_VALUE (type_domain))
13354 min_val = TYPE_MIN_VALUE (type_domain);
13355 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13361 /* Builds an expression for an indirection through T, simplifying some
13365 build_fold_indirect_ref (tree t)
13367 tree type = TREE_TYPE (TREE_TYPE (t));
13368 tree sub = fold_indirect_ref_1 (type, t);
13373 return build1 (INDIRECT_REF, type, t);
13376 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13379 fold_indirect_ref (tree t)
13381 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13389 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13390 whose result is ignored. The type of the returned tree need not be
13391 the same as the original expression. */
13394 fold_ignored_result (tree t)
13396 if (!TREE_SIDE_EFFECTS (t))
13397 return integer_zero_node;
13400 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13403 t = TREE_OPERAND (t, 0);
13407 case tcc_comparison:
13408 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13409 t = TREE_OPERAND (t, 0);
13410 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13411 t = TREE_OPERAND (t, 1);
13416 case tcc_expression:
13417 switch (TREE_CODE (t))
13419 case COMPOUND_EXPR:
13420 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13422 t = TREE_OPERAND (t, 0);
13426 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13427 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13429 t = TREE_OPERAND (t, 0);
13442 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13443 This can only be applied to objects of a sizetype. */
13446 round_up (tree value, int divisor)
13448 tree div = NULL_TREE;
13450 gcc_assert (divisor > 0);
13454 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13455 have to do anything. Only do this when we are not given a const,
13456 because in that case, this check is more expensive than just
13458 if (TREE_CODE (value) != INTEGER_CST)
13460 div = build_int_cst (TREE_TYPE (value), divisor);
13462 if (multiple_of_p (TREE_TYPE (value), value, div))
13466 /* If divisor is a power of two, simplify this to bit manipulation. */
13467 if (divisor == (divisor & -divisor))
13471 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13472 value = size_binop (PLUS_EXPR, value, t);
13473 t = build_int_cst (TREE_TYPE (value), -divisor);
13474 value = size_binop (BIT_AND_EXPR, value, t);
13479 div = build_int_cst (TREE_TYPE (value), divisor);
13480 value = size_binop (CEIL_DIV_EXPR, value, div);
13481 value = size_binop (MULT_EXPR, value, div);
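/* For example, round_up (13, 8) takes the power-of-two path and
   computes (13 + 7) & -8 = 16. */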
13487 /* Likewise, but round down. */
13490 round_down (tree value, int divisor)
13492 tree div = NULL_TREE;
13494 gcc_assert (divisor > 0);
13498 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13499 have to do anything. Only do this when we are not given a const,
13500 because in that case, this check is more expensive than just
13502 if (TREE_CODE (value) != INTEGER_CST)
13504 div = build_int_cst (TREE_TYPE (value), divisor);
13506 if (multiple_of_p (TREE_TYPE (value), value, div))
13510 /* If divisor is a power of two, simplify this to bit manipulation. */
13511 if (divisor == (divisor & -divisor))
13515 t = build_int_cst (TREE_TYPE (value), -divisor);
13516 value = size_binop (BIT_AND_EXPR, value, t);
13521 div = build_int_cst (TREE_TYPE (value), divisor);
13522 value = size_binop (FLOOR_DIV_EXPR, value, div);
13523 value = size_binop (MULT_EXPR, value, div);
13529 /* Returns the pointer to the base of the object addressed by EXP and
13530 extracts the information about the offset of the access, storing it
13531 to PBITPOS and POFFSET. */
13534 split_address_to_core_and_offset (tree exp,
13535 HOST_WIDE_INT *pbitpos, tree *poffset)
13538 enum machine_mode mode;
13539 int unsignedp, volatilep;
13540 HOST_WIDE_INT bitsize;
13542 if (TREE_CODE (exp) == ADDR_EXPR)
13544 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13545 poffset, &mode, &unsignedp, &volatilep,
13547 core = build_fold_addr_expr (core);
13553 *poffset = NULL_TREE;
13559 /* Returns true if addresses of E1 and E2 differ by a constant, false
13560 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13563 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13566 HOST_WIDE_INT bitpos1, bitpos2;
13567 tree toffset1, toffset2, tdiff, type;
13569 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13570 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13572 if (bitpos1 % BITS_PER_UNIT != 0
13573 || bitpos2 % BITS_PER_UNIT != 0
13574 || !operand_equal_p (core1, core2, 0))
13577 if (toffset1 && toffset2)
13579 type = TREE_TYPE (toffset1);
13580 if (type != TREE_TYPE (toffset2))
13581 toffset2 = fold_convert (type, toffset2);
13583 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13584 if (!cst_and_fits_in_hwi (tdiff))
13587 *diff = int_cst_value (tdiff);
13589 else if (toffset1 || toffset2)
13591 /* If only one of the offsets is non-constant, the difference cannot be expressed as a constant. */
13598 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
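/* For example, for &a[3] and &a[1] with 4-byte elements the cores
   compare equal and *diff ends up as 8. */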
13602 /* Simplify the floating point expression EXP when the sign of the
13603 result is not significant. Return NULL_TREE if no simplification
13607 fold_strip_sign_ops (tree exp)
13611 switch (TREE_CODE (exp))
13615 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13616 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13620 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13622 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13623 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13624 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13625 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13626 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13627 arg1 ? arg1 : TREE_OPERAND (exp, 1));