/* Support for fully folding sub-trees of an expression for C compiler.
   Copyright (C) 1992-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "bitmap.h"
#include "c-tree.h"
#include "intl.h"
#include "gimplify.h"

static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool,
                                   bool);

/* If DISABLE is true, stop issuing warnings.  This is used when
   parsing code that we know will not be executed.  This function may
   be called multiple times, and works as a stack.  */

static void
c_disable_warnings (bool disable)
{
  if (disable)
    {
      ++c_inhibit_evaluation_warnings;
      fold_defer_overflow_warnings ();
    }
}

/* If ENABLE is true, reenable issuing warnings.  */

static void
c_enable_warnings (bool enable)
{
  if (enable)
    {
      --c_inhibit_evaluation_warnings;
      fold_undefer_and_ignore_overflow_warnings ();
    }
}
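
/* These two are used in matched pairs, bracketing the folding of an
   operand that is known not to be evaluated, as in the COND_EXPR
   handling later in this file:

     c_disable_warnings (op0 == truthvalue_false_node);
     op1 = c_fully_fold_internal (op1, ...);
     c_enable_warnings (op0 == truthvalue_false_node);  */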

/* Try to fold ARRAY_REF ary[index] if possible and not handled by
   normal fold; return NULL_TREE otherwise.  */

static tree
c_fold_array_ref (tree type, tree ary, tree index)
{
  if (TREE_CODE (ary) != STRING_CST
      || TREE_CODE (index) != INTEGER_CST
      || TREE_OVERFLOW (index)
      || TREE_CODE (TREE_TYPE (ary)) != ARRAY_TYPE
      || !tree_fits_uhwi_p (index))
    return NULL_TREE;

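  /* Number of target characters per array element: 1 for a narrow
     string literal, more for e.g. a wide string literal.  */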
  tree elem_type = TREE_TYPE (TREE_TYPE (ary));
  unsigned elem_nchars = (TYPE_PRECISION (elem_type)
                          / TYPE_PRECISION (char_type_node));
  unsigned len = (unsigned) TREE_STRING_LENGTH (ary) / elem_nchars;
  tree nelts = array_type_nelts (TREE_TYPE (ary));
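  /* The element count may not be in fully folded form yet; fold it so
     the bound check below can compare against a constant where
     possible.  */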
  bool dummy1 = true, dummy2 = true;
  nelts = c_fully_fold_internal (nelts, true, &dummy1, &dummy2, false, false);
  unsigned HOST_WIDE_INT i = tree_to_uhwi (index);
  if (!tree_int_cst_le (index, nelts)
      || i >= len
      || i + elem_nchars > len)
    return NULL_TREE;

  if (elem_nchars == 1)
    return build_int_cst (type, TREE_STRING_POINTER (ary)[i]);

  const unsigned char *ptr
    = ((const unsigned char *) TREE_STRING_POINTER (ary) + i * elem_nchars);
  return native_interpret_expr (type, ptr, elem_nchars);
}
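
/* For example, in an initializer "abc"[1] folds to the INTEGER_CST 'b';
   an out-of-range index makes c_fold_array_ref return NULL_TREE and
   leaves the ARRAY_REF to normal folding.  */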

/* Fully fold EXPR, an expression that was not folded (beyond integer
   constant expressions and null pointer constants) when being built
   up.  If IN_INIT, this is in a static initializer and certain
   changes are made to the folding done.  Clear *MAYBE_CONST if
   MAYBE_CONST is not NULL and EXPR is definitely not a constant
   expression because it contains an evaluated operator (in C99) or an
   operator outside of sizeof returning an integer constant (in C90)
   not permitted in constant expressions, or because it contains an
   evaluated arithmetic overflow.  (*MAYBE_CONST should typically be
   set to true by callers before calling this function.)  Return the
   folded expression.  Function arguments have already been folded
   before calling this function, as have the contents of SAVE_EXPR,
   TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
   C_MAYBE_CONST_EXPR.  LVAL is true if it should be treated as an
   lvalue.  */
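
/* Illustrative sketch of the contract above (hypothetical caller):

     bool maybe_const = true;
     expr = c_fully_fold (expr, false, &maybe_const, false);

   MAYBE_CONST ends up false e.g. for an evaluated comma operator or,
   per the rules above, for an evaluated arithmetic overflow such as
   INT_MAX + 1.  */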

tree
c_fully_fold (tree expr, bool in_init, bool *maybe_const, bool lval)
{
  tree ret;
  tree eptype = NULL_TREE;
  bool dummy = true;
  bool maybe_const_itself = true;
  location_t loc = EXPR_LOCATION (expr);

  if (!maybe_const)
    maybe_const = &dummy;
  if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
    {
      eptype = TREE_TYPE (expr);
      expr = TREE_OPERAND (expr, 0);
    }
  ret = c_fully_fold_internal (expr, in_init, maybe_const,
                               &maybe_const_itself, false, lval);
  if (eptype)
    ret = fold_convert_loc (loc, eptype, ret);
  *maybe_const &= maybe_const_itself;
  return ret;
}

/* Internal helper for c_fully_fold.  EXPR and IN_INIT are as for
   c_fully_fold.  *MAYBE_CONST_OPERANDS is cleared because of operands
   not permitted, while *MAYBE_CONST_ITSELF is cleared because of
   arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
   both evaluated and unevaluated subexpressions while
   *MAYBE_CONST_ITSELF is carried from only evaluated
   subexpressions).  FOR_INT_CONST indicates that EXPR is an expression
   with integer constant operands; if any of those operands does not
   fold to an integer constant, do not fold the expression itself.
   LVAL indicates folding of an lvalue, which must not be replaced
   with an rvalue.  */

static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
                       bool *maybe_const_itself, bool for_int_const, bool lval)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  bool op0_lval = false;
  source_range old_range;

  /* Constants, declarations, statements, errors, and anything else not
     counted as an expression cannot usefully be folded further at this
     point.  */
  if (!IS_EXPR_CODE_CLASS (kind) || kind == tcc_statement)
    {
      /* Except for variables, which we can replace with their
         initializers.  */
      if (VAR_P (expr) && !lval && (optimize || in_init))
        {
          if (in_init)
            ret = decl_constant_value_1 (expr);
          else
            ret = decl_constant_value (expr);
          /* Avoid unwanted tree sharing between the initializer and current
             function's body where the tree can be modified e.g. by the
             gimplifier.  */
          if (ret != expr && TREE_STATIC (expr))
            ret = unshare_expr (ret);
          return ret;
        }
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
        *maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
        {
          *maybe_const_itself = false;
          inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
                                         maybe_const_itself, true, lval);
        }
      if (pre && !in_init)
        ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
        ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
         C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
        ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      /* Fold "foo"[2] in initializers.  */
      if (!lval && in_init)
        {
          ret = c_fold_array_ref (TREE_TYPE (expr), op0, op1);
          if (ret)
            goto out;
          ret = expr;
        }
      if (op0 != orig_op0 || op1 != orig_op1)
        ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      if (!lval)
        ret = fold (ret);
      goto out;

    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      op0_lval = true;
      /* FALLTHRU */
    case COMPOUND_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Binary operations evaluating both arguments (increment and
         decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const,
                                   op0_lval);
      STRIP_TYPE_NOPS (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
         expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
        op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                     maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
                            || TREE_CODE (op1) != INTEGER_CST))
        goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
        ret = in_init
          ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
          : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
        ret = fold (expr);
      if (TREE_OVERFLOW_P (ret)
          && !TREE_OVERFLOW_P (op0)
          && !TREE_OVERFLOW_P (op1))
        overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret, expr);
      if (code == LSHIFT_EXPR
          && TREE_CODE (orig_op0) != INTEGER_CST
          && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
          && TREE_CODE (op0) == INTEGER_CST
          && c_inhibit_evaluation_warnings == 0
          && tree_int_cst_sgn (op0) < 0)
        warning_at (loc, OPT_Wshift_negative_value,
                    "left shift of negative value");
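      /* For the shift-count diagnostics below, warn only when folding
         has newly revealed a constant count (ORIG_OP1 was not an
         INTEGER_CST but OP1 now is); counts that were literal constants
         will have been diagnosed when the expression was built.  */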
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (orig_op1) != INTEGER_CST
          && TREE_CODE (op1) == INTEGER_CST
          && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
          && c_inhibit_evaluation_warnings == 0)
        {
          if (tree_int_cst_sgn (op1) < 0)
            warning_at (loc, OPT_Wshift_count_negative,
                        (code == LSHIFT_EXPR
                         ? G_("left shift count is negative")
                         : G_("right shift count is negative")));
          else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
                    || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
                   && compare_tree_int (op1,
                                        TYPE_PRECISION (TREE_TYPE (orig_op0)))
                      >= 0)
            warning_at (loc, OPT_Wshift_count_overflow,
                        (code == LSHIFT_EXPR
                         ? G_("left shift count >= width of type")
                         : G_("right shift count >= width of type")));
          else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE
                   && compare_tree_int (op1,
                                        TYPE_PRECISION (TREE_TYPE
                                                        (TREE_TYPE (orig_op0))))
                      >= 0)
            warning_at (loc, OPT_Wshift_count_overflow,
                        code == LSHIFT_EXPR
                        ? G_("left shift count >= width of vector element")
                        : G_("right shift count >= width of vector element"));
        }
      if (code == LSHIFT_EXPR
          /* If either OP0 has been folded to INTEGER_CST...  */
          && ((TREE_CODE (orig_op0) != INTEGER_CST
               && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
               && TREE_CODE (op0) == INTEGER_CST)
              /* ...or if OP1 has been folded to INTEGER_CST...  */
              || (TREE_CODE (orig_op1) != INTEGER_CST
                  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
                  && TREE_CODE (op1) == INTEGER_CST))
          && c_inhibit_evaluation_warnings == 0)
        /* ...then maybe we can detect an overflow.  */
        maybe_warn_shift_overflow (loc, op0, op1);
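      /* Likewise, diagnose division by a constant zero divisor only
         when folding has just exposed it.  */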
      if ((code == TRUNC_DIV_EXPR
           || code == CEIL_DIV_EXPR
           || code == FLOOR_DIV_EXPR
           || code == EXACT_DIV_EXPR
           || code == TRUNC_MOD_EXPR)
          && TREE_CODE (orig_op1) != INTEGER_CST
          && TREE_CODE (op1) == INTEGER_CST
          && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
              || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
          && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
        warn_for_div_by_zero (loc, op1);
      goto out;

    case ADDR_EXPR:
      op0_lval = true;
      goto unary;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      op0_lval = lval;
      /* FALLTHRU */
    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case CONJ_EXPR:
    unary:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const,
                                   op0_lval);
      STRIP_TYPE_NOPS (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
        goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
         not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
          && code == ADDR_EXPR
          && (op1 = get_base_address (op0)) != NULL_TREE
          && INDIRECT_REF_P (op1)
          && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
        ret = fold_convert_loc (loc, TREE_TYPE (expr), fold_offsetof_1 (op0));
      else if (op0 != orig_op0 || in_init)
        ret = in_init
          ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
          : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
        ret = fold (expr);
      if (code == INDIRECT_REF
          && ret != expr
          && INDIRECT_REF_P (ret))
        {
          TREE_READONLY (ret) = TREE_READONLY (expr);
          TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
          TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
        }
      switch (code)
        {
        case FIX_TRUNC_EXPR:
        case FLOAT_EXPR:
        CASE_CONVERT:
          /* Don't warn about explicit conversions.  We will already
             have warned about suspect implicit conversions.  */
          break;

        default:
          if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
            overflow_warning (EXPR_LOCATION (expr), ret, op0);
          break;
        }
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
         arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op0);

      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
                          ? truthvalue_false_node
                          : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
          && (TREE_CODE (op0) != INTEGER_CST
              /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
              || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
        goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
        ret = in_init
          ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
          : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
        ret = fold (expr);
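      /* Propagate constancy: in C99 (but not C90) an unevaluated
         operand cannot make the expression non-constant once OP0
         decides the result, while overflow in an unevaluated operand
         never counts; hence the asymmetric guards below.  */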
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
            && op0_const
            && op0_const_self
            && (code == TRUTH_ANDIF_EXPR
                ? op0 == truthvalue_false_node
                : op0 == truthvalue_true_node)))
        *maybe_const_operands &= op1_const;
      if (!(op0_const
            && op0_const_self
            && (code == TRUTH_ANDIF_EXPR
                ? op0 == truthvalue_false_node
                : op0 == truthvalue_true_node)))
        *maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op0);

      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
                                   for_int_const, false);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
          && (TREE_CODE (op0) != INTEGER_CST
              /* Only the evaluated operand must be an INTEGER_CST.  */
              || (op0 == truthvalue_true_node
                  ? TREE_CODE (op1) != INTEGER_CST
                  : TREE_CODE (op2) != INTEGER_CST)))
        goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
        ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
        ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
            && op0_const
            && op0_const_self
            && op0 == truthvalue_false_node))
        *maybe_const_operands &= op1_const;
      if (!(op0_const
            && op0_const_self
            && op0 == truthvalue_false_node))
        *maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
            && op0_const
            && op0_const_self
            && op0 == truthvalue_true_node))
        *maybe_const_operands &= op2_const;
      if (!(op0_const
            && op0_const_self
            && op0 == truthvalue_true_node))
        *maybe_const_itself &= op2_const_self;
      goto out;

    case VEC_COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
                                   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
        ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
        ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
         encountered must remove the EXCESS_PRECISION_EXPR around
         inner operands and possibly put one around the whole
         expression or possibly convert to the semantic type (which
         c_fully_fold does); we cannot tell at this stage which is
         appropriate in any particular case.  */
      gcc_unreachable ();

    case SAVE_EXPR:
      /* Make sure to fold the contents of a SAVE_EXPR exactly once.  */
      op0 = TREE_OPERAND (expr, 0);
      if (!SAVE_EXPR_FOLDED_P (expr))
        {
          op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
                                       maybe_const_itself, for_int_const,
                                       false);
          TREE_OPERAND (expr, 0) = op0;
          SAVE_EXPR_FOLDED_P (expr) = true;
        }
      /* Return the SAVE_EXPR operand if it is invariant.  */
      if (tree_invariant_p (op0))
        ret = op0;
      goto out;

    default:
      /* Various codes may appear through folding built-in functions
         and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      if (!CAN_HAVE_LOCATION_P (ret))
        ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
        set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}