/* Fold a constant sub-tree into a single node for C-compiler

   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

42 | |

43 | #include "config.h" |

44 | #include "system.h" |

45 | #include "coretypes.h" |

46 | #include "backend.h" |

47 | #include "target.h" |

48 | #include "rtl.h" |

49 | #include "tree.h" |

50 | #include "gimple.h" |

51 | #include "predict.h" |

52 | #include "memmodel.h" |

53 | #include "tm_p.h" |

54 | #include "tree-ssa-operands.h" |

55 | #include "optabs-query.h" |

56 | #include "cgraph.h" |

57 | #include "diagnostic-core.h" |

58 | #include "flags.h" |

59 | #include "alias.h" |

60 | #include "fold-const.h" |

61 | #include "fold-const-call.h" |

62 | #include "stor-layout.h" |

63 | #include "calls.h" |

64 | #include "tree-iterator.h" |

65 | #include "expr.h" |

66 | #include "intl.h" |

67 | #include "langhooks.h" |

68 | #include "tree-eh.h" |

69 | #include "gimplify.h" |

70 | #include "tree-dfa.h" |

71 | #include "builtins.h" |

72 | #include "generic-match.h" |

73 | #include "gimple-fold.h" |

74 | #include "params.h" |

75 | #include "tree-into-ssa.h" |

76 | #include "md5.h" |

77 | #include "case-cfn-macros.h" |

78 | #include "stringpool.h" |

79 | #include "tree-vrp.h" |

80 | #include "tree-ssanames.h" |

81 | #include "selftest.h" |

82 | #include "stringpool.h" |

83 | #include "attribs.h" |

84 | #include "tree-vector-builder.h" |

85 | |

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  NOTE(review): the consumers of this flag are elsewhere
   in the file/tree — not visible in this chunk; confirm before relying
   on its exact semantics.  */
int folding_initializer = 0;

89 | |

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The encoding uses four bits:
     bit 0 (value 1) -- "less than" is possible
     bit 1 (value 2) -- "equal" is possible
     bit 2 (value 4) -- "greater than" is possible
     bit 3 (value 8) -- "unordered" is possible
   so e.g. COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ, and the AND/OR of
   two comparisons corresponds to the bitwise AND/OR of their codes.  */
enum comparison_code {
  COMPCODE_FALSE = 0,	/* No outcome possible: always false.  */
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,	/* LT | EQ */
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,	/* LT | GT */
  COMPCODE_GE = 6,	/* EQ | GT */
  COMPCODE_ORD = 7,	/* LT | EQ | GT: any ordered outcome.  */
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,	/* UNORD | LT */
  COMPCODE_UNEQ = 10,	/* UNORD | EQ */
  COMPCODE_UNLE = 11,	/* UNORD | LT | EQ */
  COMPCODE_UNGT = 12,	/* UNORD | GT */
  COMPCODE_NE = 13,	/* UNORD | LT | GT */
  COMPCODE_UNGE = 14,	/* UNORD | EQ | GT */
  COMPCODE_TRUE = 15	/* All outcomes possible: always true.  */
};

111 | |

/* Forward declarations of static helpers defined later in this file.  */
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);

141 | |

142 | |

143 | /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION. |

144 | Otherwise, return LOC. */ |

145 | |

146 | static location_t |

147 | expr_location_or (tree t, location_t loc) |

148 | { |

149 | location_t tloc = EXPR_LOCATION (t); |

150 | return tloc == UNKNOWN_LOCATION ? loc : tloc; |

151 | } |

152 | |

153 | /* Similar to protected_set_expr_location, but never modify x in place, |

154 | if location can and needs to be set, unshare it. */ |

155 | |

156 | static inline tree |

157 | protected_set_expr_location_unshare (tree x, location_t loc) |

158 | { |

159 | if (CAN_HAVE_LOCATION_P (x) |

160 | && EXPR_LOCATION (x) != loc |

161 | && !(TREE_CODE (x) == SAVE_EXPR |

162 | || TREE_CODE (x) == TARGET_EXPR |

163 | || TREE_CODE (x) == BIND_EXPR)) |

164 | { |

165 | x = copy_node (x); |

166 | SET_EXPR_LOCATION (x, loc); |

167 | } |

168 | return x; |

169 | } |

170 | |

171 | /* If ARG2 divides ARG1 with zero remainder, carries out the exact |

172 | division and returns the quotient. Otherwise returns |

173 | NULL_TREE. */ |

174 | |

175 | tree |

176 | div_if_zero_remainder (const_tree arg1, const_tree arg2) |

177 | { |

178 | widest_int quo; |

179 | |

180 | if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2), |

181 | SIGNED, &quo)) |

182 | return wide_int_to_tree (TREE_TYPE (arg1), quo); |

183 | |

184 | return NULL_TREE; |

185 | } |

186 | |

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.

   Acts as a nesting counter: incremented by fold_defer_overflow_warnings,
   decremented by fold_undefer_overflow_warnings.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning (a gettext msgid).  Note that this may cause us to turn two
   warnings into one, but that is fine since it is sufficient to only
   give one warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  Only the
   lowest-numbered (most important) pending level is kept; see
   fold_overflow_warning.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

209 | |

210 | /* Start deferring overflow warnings. We could use a stack here to |

211 | permit nested calls, but at present it is not necessary. */ |

212 | |

213 | void |

214 | fold_defer_overflow_warnings (void) |

215 | { |

216 | ++fold_deferring_overflow_warnings; |

217 | } |

218 | |

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  /* If this was a nested defer, keep deferring: just merge CODE into
     the pending level (smaller value wins) and do not warn yet.  */
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  /* Outermost undefer: consume the pending warning.  */
  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Respect a no-warning flag on the associated statement.  */
  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

268 | |

269 | /* Stop deferring overflow warnings, ignoring any deferred |

270 | warnings. */ |

271 | |

272 | void |

273 | fold_undefer_and_ignore_overflow_warnings (void) |

274 | { |

275 | fold_undefer_overflow_warnings (false, NULL, 0); |

276 | } |

277 | |

278 | /* Whether we are deferring overflow warnings. */ |

279 | |

280 | bool |

281 | fold_deferring_overflow_warnings_p (void) |

282 | { |

283 | return fold_deferring_overflow_warnings > 0; |

284 | } |

285 | |

286 | /* This is called when we fold something based on the fact that signed |

287 | overflow is undefined. */ |

288 | |

289 | void |

290 | fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc) |

291 | { |

292 | if (fold_deferring_overflow_warnings > 0) |

293 | { |

294 | if (fold_deferred_overflow_warning == NULL |

295 | || wc < fold_deferred_overflow_code) |

296 | { |

297 | fold_deferred_overflow_warning = gmsgid; |

298 | fold_deferred_overflow_code = wc; |

299 | } |

300 | } |

301 | else if (issue_strict_overflow_warning (wc)) |

302 | warning (OPT_Wstrict_overflow, gmsgid); |

303 | } |

304 | |

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x), so that negation may be moved inside
   the call.  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    /* These functions are odd for all inputs.  */
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    /* The rounding-to-integer functions are only odd when the current
       rounding mode is known not to matter.  */
    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

349 | |

350 | /* Check whether we may negate an integer constant T without causing |

351 | overflow. */ |

352 | |

353 | bool |

354 | may_negate_without_overflow_p (const_tree t) |

355 | { |

356 | tree type; |

357 | |

358 | gcc_assert (TREE_CODE (t) == INTEGER_CST); |

359 | |

360 | type = TREE_TYPE (t); |

361 | if (TYPE_UNSIGNED (type)) |

362 | return false; |

363 | |

364 | return !wi::only_sign_bit_p (wi::to_wide (t)); |

365 | } |

366 | |

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.
   Conservative: returning false only means negate_expr would fall
   back to building a NEGATE_EXPR.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  /* TYPE is taken before stripping sign-preserving conversions, so it
     reflects what the caller sees.  */
  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* Unsigned negation always wraps and is well defined.  */
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* -~A would become A + 1; only valid when overflow wraps.  */
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      /* --A is A, unless a sanitizer wants to see the overflow.  */
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow while negating one operand it does
         if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Negation can be pushed onto either operand.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

519 | |

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Accept the folded constant unless negation introduced a new
	 overflow that the signed-integer-overflow sanitizer would want
	 to report at run time.  */
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      /* Negate both parts; fail if either part cannot be negated.  */
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      /* Negate every encoded element; fail as soon as one resists.  */
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      /* --A -> A, unless the overflow sanitizer must see it.  */
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Push the negation onto whichever operand accepts it,
	 preferring the second.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      /* Flip the signedness of the type so the shift result is
		 the negated value, then convert back.  */
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

719 | |

720 | /* A wrapper for fold_negate_expr_1. */ |

721 | |

722 | static tree |

723 | fold_negate_expr (location_t loc, tree t) |

724 | { |

725 | tree type = TREE_TYPE (t); |

726 | STRIP_SIGN_NOPS (t); |

727 | tree tem = fold_negate_expr_1 (loc, t); |

728 | if (tem == NULL_TREE) |

729 | return NULL_TREE; |

730 | return fold_convert_loc (loc, type, tem); |

731 | } |

732 | |

733 | /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be |

734 | negated in a simpler way. Also allow for T to be NULL_TREE, in which case |

735 | return NULL_TREE. */ |

736 | |

737 | static tree |

738 | negate_expr (tree t) |

739 | { |

740 | tree type, tem; |

741 | location_t loc; |

742 | |

743 | if (t == NULL_TREE) |

744 | return NULL_TREE; |

745 | |

746 | loc = EXPR_LOCATION (t); |

747 | type = TREE_TYPE (t); |

748 | STRIP_SIGN_NOPS (t); |

749 | |

750 | tem = fold_negate_expr (loc, t); |

751 | if (!tem) |

752 | tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t); |

753 | return fold_convert_loc (loc, type, tem); |

754 | } |

755 | |

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  /* Clear all out-parameters up front; each either stays null or gets
     exactly one part below.  */
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* neg1_p: the second operand of IN is effectively subtracted.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations: move subtracted parts to the
	 corresponding *MINUS_* slots.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      /* Negating the whole of IN: swap each part with its MINUS slot.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  /* Drop stale overflow flags from the literal parts.  */
  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}

883 | |

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      /* MINUS with only a second operand would change its sign, so it
	 must never reach here with T1 null and T2 non-null.  */
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  /* T1 + (-X) -> T2' - X, and symmetrically.  */
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      /* Build without folding to avoid re-entering the associator.  */
      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

935 | |

936 | /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable |

937 | for use in int_const_binop, size_binop and size_diffop. */ |

938 | |

939 | static bool |

940 | int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2) |

941 | { |

942 | if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1)) |

943 | return false; |

944 | if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2)) |

945 | return false; |

946 | |

947 | switch (code) |

948 | { |

949 | case LSHIFT_EXPR: |

950 | case RSHIFT_EXPR: |

951 | case LROTATE_EXPR: |

952 | case RROTATE_EXPR: |

953 | return true; |

954 | |

955 | default: |

956 | break; |

957 | } |

958 | |

959 | return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2) |

960 | && TYPE_PRECISION (type1) == TYPE_PRECISION (type2) |

961 | && TYPE_MODE (type1) == TYPE_MODE (type2); |

962 | } |

963 | |

964 | |

965 | /* Combine two integer constants PARG1 and PARG2 under operation CODE |

966 | to produce a new constant. Return NULL_TREE if we don't know how |

967 | to evaluate CODE at compile-time. */ |

968 | |

static tree
int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  /* The result takes the type of the first operand; callers guarantee
     the operand types are compatible (see int_binop_types_match_p).  */
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  /* View ARG1 in place; materialize ARG2 at ARG1's precision so the
     wide-int operations below see matching precisions (shift/rotate
     counts may come in a different type).  */
  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      /* A negative shift count means a shift in the other direction.  */
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      /* Likewise, a negative rotate count rotates the other way.  */
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    /* All division and modulus flavors refuse to fold a zero divisor
       (undefined behavior; let the runtime/diagnostics deal with it).  */
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  /* Mark the result as overflowed when the operation itself wrapped in
     a context that cares (signed arithmetic, or OVERFLOWABLE == -1
     which requests overflow detection even for unsigned), or when an
     input already carried TREE_OVERFLOW.  */
  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}

1117 | |

/* Public wrapper: combine integer constants ARG1 and ARG2 under CODE,
   delegating to int_const_binop_1 with OVERFLOWABLE == 1.  Returns
   NULL_TREE if CODE cannot be evaluated at compile time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

1123 | |

1124 | /* Return true if binary operation OP distributes over addition in operand |

1125 | OPNO, with the other operand being held constant. OPNO counts from 1. */ |

1126 | |

1127 | static bool |

1128 | distributes_over_addition_p (tree_code op, int opno) |

1129 | { |

1130 | switch (op) |

1131 | { |

1132 | case PLUS_EXPR: |

1133 | case MINUS_EXPR: |

1134 | case MULT_EXPR: |

1135 | return true; |

1136 | |

1137 | case LSHIFT_EXPR: |

1138 | return opno == 1; |

1139 | |

1140 | default: |

1141 | return false; |

1142 | } |

1143 | } |

1144 | |

1145 | /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new |

1146 | constant. We assume ARG1 and ARG2 have the same data type, or at least |

1147 | are the same kind of constant and the same machine mode. Return zero if |

1148 | combining the constants is not allowed in the current operating mode. */ |

1149 | |

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  /* Look through no-op conversions so wrapped constants are still
     recognized by the TREE_CODE checks below.  */
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      /* POINTER_PLUS_EXPR's second operand is sizetype; bring it to the
	 pointer operand's type and fold as an ordinary addition.  */
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    /* The shift count is an integer constant; repackage it as
	       a fixed value so fixed_arithmetic can consume it.  */
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  /* Componentwise for addition and subtraction.  */
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  /* Floating-point complex multiply goes through MPC for
	     correctly-rounded results.  */
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      /* Any recursive fold may have failed; only build the complex
	 constant when both halves folded.  */
      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))
	  == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      /* Only the encoded elements need to be folded; the builder
	 extrapolates the rest.  */
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}

1515 | |

1516 | /* Overload that adds a TYPE parameter to be able to dispatch |

1517 | to fold_relational_const. */ |

1518 | |

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  /* Comparisons have a result type distinct from their operands, so
     they need the explicit TYPE and go through the relational folder.  */
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
	{
	  /* Subtract at offset_int precision and let force_fit_type
	     fit the value to TYPE, flagging overflow.  */
	  offset_int res = wi::sub (wi::to_offset (arg1),
				    wi::to_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg1);
	out_nelts = in_nelts * 2;
	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    /* First half of the result comes from ARG1, the second
	       half from ARG2, each element narrowed to TYPE's element
	       type.  */
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg1);
	out_nelts = in_nelts / 2;
	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

	/* LO/HI take a contiguous half (which half depends on target
	   endianness); EVEN/ODD take every other element starting at
	   index 0 or 1 respectively.  */
	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    /* Widen each selected element, then multiply at the wide
	       element type.  */
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  /* Everything else is handled by the worker, which takes the result
     type from ARG1.  */
  return const_binop (code, arg1, arg2);
}

1634 | |

1635 | /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant. |

1636 | Return zero if computing the constants is not possible. */ |

1637 | |

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      /* Conjugation negates only the imaginary part.  */
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  /* Only build the result if every encoded element folded.  */
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg0);
	out_nelts = in_nelts / 2;
	gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));

	/* Which half of the input is selected depends on both the
	   LO/HI code and the target's endianness.  */
	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  offset = out_nelts;

	/* FLOAT variants convert to floating point while widening.  */
	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:
      break;
    }

  return NULL_TREE;
}

1779 | |

1780 | /* Create a sizetype INT_CST node with NUMBER sign extended. KIND |

1781 | indicates which particular sizetype to create. */ |

1782 | |

1783 | tree |

1784 | size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) |

1785 | { |

1786 | return build_int_cst (sizetype_tab[(int) kind], number); |

1787 | } |

1788 | |

1789 | /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE |

1790 | is a tree code. The type of the result is taken from the operands. |

1791 | Both must be equivalent integer types, ala int_binop_types_match_p. |

1792 | If the operands are constant, so is the result. */ |

1793 | |

1794 | tree |

1795 | size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1) |

1796 | { |

1797 | tree type = TREE_TYPE (arg0); |

1798 | |

1799 | if (arg0 == error_mark_node || arg1 == error_mark_node) |

1800 | return error_mark_node; |

1801 | |

1802 | gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0), |

1803 | TREE_TYPE (arg1))); |

1804 | |

1805 | /* Handle the special case of two integer constants faster. */ |

1806 | if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) |

1807 | { |

1808 | /* And some specific cases even faster than that. */ |

1809 | if (code == PLUS_EXPR) |

1810 | { |

1811 | if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0)) |

1812 | return arg1; |

1813 | if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) |

1814 | return arg0; |

1815 | } |

1816 | else if (code == MINUS_EXPR) |

1817 | { |

1818 | if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) |

1819 | return arg0; |

1820 | } |

1821 | else if (code == MULT_EXPR) |

1822 | { |

1823 | if (integer_onep (arg0) && !TREE_OVERFLOW (arg0)) |

1824 | return arg1; |

1825 | } |

1826 | |

1827 | /* Handle general case of two integer constants. For sizetype |

1828 | constant calculations we always want to know about overflow, |

1829 | even in the unsigned case. */ |

1830 | return int_const_binop_1 (code, arg0, arg1, -1); |

1831 | } |

1832 | |

1833 | return fold_build2_loc (loc, code, type, arg0, arg1); |

1834 | } |

1835 | |

1836 | /* Given two values, either both of sizetype or both of bitsizetype, |

1837 | compute the difference between the two values. Return the value |

1838 | in signed type corresponding to the type of the operands. */ |

1839 | |

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  /* Pick the signed counterpart of TYPE for the result.  */
  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    /* 0 - (arg1 - arg0): the inner difference is non-negative and fits
       in CTYPE, so converting and negating cannot overflow.  */
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}

1884 | |

1885 | /* A subroutine of fold_convert_const handling conversions of an |

1886 | INTEGER_CST to another integer type. */ |

1887 | |

1888 | static tree |

1889 | fold_convert_const_int_from_int (tree type, const_tree arg1) |

1890 | { |

1891 | /* Given an integer constant, make new constant with new type, |

1892 | appropriately sign-extended or truncated. Use widest_int |

1893 | so that any extension is done according ARG1's type. */ |

1894 | return force_fit_type (type, wi::to_widest (arg1), |

1895 | !POINTER_TYPE_P (TREE_TYPE (arg1)), |

1896 | TREE_OVERFLOW (arg1)); |

1897 | } |

1898 | |

1899 | /* A subroutine of fold_convert_const handling conversions a REAL_CST |

1900 | to an integer type. */ |

1901 | |

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  /* First discard the fractional part of X according to CODE; only
     truncation toward zero is implemented here.  */
  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  /* Below range: saturate to TYPE's minimum value.  */
	  overflow = true;
	  val = wi::to_wide (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      /* Above range: saturate to TYPE's maximum value.  */
	      overflow = true;
	      val = wi::to_wide (ut);
	    }
	}
    }

  /* In range: the exact integer conversion.  */
  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  /* Build the INTEGER_CST, keeping any overflow from ARG1 sticky.  */
  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

1972 | |

1973 | /* A subroutine of fold_convert_const handling conversions of a |

1974 | FIXED_CST to an integer type. */ |

1975 | |

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  scalar_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      /* The shift is arithmetic for signed fixed-point modes.  */
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      /* Every bit is fractional: the integer part is zero.  */
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.
     (TEMP_TRUNC differing from the original data means nonzero
     fractional bits were shifted away.)  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  Overflow is flagged for
     a negative value when the destination/source signedness mismatch
     makes it unrepresentable, or when ARG1 already overflowed.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}

2020 | |

2021 | /* A subroutine of fold_convert_const handling conversions a REAL_CST |

2022 | to another floating point type. */ |

2023 | |

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
    return NULL_TREE;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    /* Otherwise simply inherit ARG1's overflow flag.  */
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

2059 | |

2060 | /* A subroutine of fold_convert_const handling conversions a FIXED_CST |

2061 | to a floating point type. */ |

2062 | |

2063 | static tree |

2064 | fold_convert_const_real_from_fixed (tree type, const_tree arg1) |

2065 | { |

2066 | REAL_VALUE_TYPE value; |

2067 | tree t; |

2068 | |

2069 | real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type), |

2070 | &TREE_FIXED_CST (arg1)); |

2071 | t = build_real (type, value); |

2072 | |

2073 | TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); |

2074 | return t; |

2075 | } |

2076 | |

2077 | /* A subroutine of fold_convert_const handling conversions a FIXED_CST |

2078 | to another fixed-point type. */ |

2079 | |

2080 | static tree |

2081 | fold_convert_const_fixed_from_fixed (tree type, const_tree arg1) |

2082 | { |

2083 | FIXED_VALUE_TYPE value; |

2084 | tree t; |

2085 | bool overflow_p; |

2086 | |

2087 | overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type), |

2088 | &TREE_FIXED_CST (arg1), TYPE_SATURATING (type)); |

2089 | t = build_fixed (type, value); |

2090 | |

2091 | /* Propagate overflow flags. */ |

2092 | if (overflow_p | TREE_OVERFLOW (arg1)) |

2093 | TREE_OVERFLOW (t) = 1; |

2094 | return t; |

2095 | } |

2096 | |

2097 | /* A subroutine of fold_convert_const handling conversions an INTEGER_CST |

2098 | to a fixed-point type. */ |

2099 | |

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  /* Repack the (at most two) HOST_WIDE_INT elements of ARG1 into a
     double_int, sign-extending the high half when only one element
     is present.  */
  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

2126 | |

2127 | /* A subroutine of fold_convert_const handling conversions a REAL_CST |

2128 | to a fixed-point type. */ |

2129 | |

2130 | static tree |

2131 | fold_convert_const_fixed_from_real (tree type, const_tree arg1) |

2132 | { |

2133 | FIXED_VALUE_TYPE value; |

2134 | tree t; |

2135 | bool overflow_p; |

2136 | |

2137 | overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type), |

2138 | &TREE_REAL_CST (arg1), |

2139 | TYPE_SATURATING (type)); |

2140 | t = build_fixed (type, value); |

2141 | |

2142 | /* Propagate overflow flags. */ |

2143 | if (overflow_p | TREE_OVERFLOW (arg1)) |

2144 | TREE_OVERFLOW (t) = 1; |

2145 | return t; |

2146 | } |

2147 | |

2148 | /* Attempt to fold type conversion operation CODE of expression ARG1 to |

2149 | type TYPE. If no simplification can be done return NULL_TREE. */ |

2150 | |

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  /* Nothing to do when the type is unchanged.  */
  if (TREE_TYPE (arg1) == type)
    return arg1;

  /* Dispatch first on the category of the destination TYPE, then on
     the kind of constant being converted.  */
  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      /* Only handle vectors with matching element counts.  */
      if (TREE_CODE (arg1) == VECTOR_CST
	  && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
	{
	  tree elttype = TREE_TYPE (type);
	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
	  /* We can't handle steps directly when extending, since the
	     values need to wrap at the original precision first.  */
	  bool step_ok_p
	    = (INTEGRAL_TYPE_P (elttype)
	       && INTEGRAL_TYPE_P (arg1_elttype)
	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
	  tree_vector_builder v;
	  if (!v.new_unary_operation (type, arg1, step_ok_p))
	    return NULL_TREE;
	  unsigned int len = v.encoded_nelts ();
	  /* Convert each encoded element, failing wholesale if any
	     single element cannot be converted.  */
	  for (unsigned int i = 0; i < len; ++i)
	    {
	      tree elt = VECTOR_CST_ELT (arg1, i);
	      tree cvt = fold_convert_const (code, elttype, elt);
	      if (cvt == NULL_TREE)
		return NULL_TREE;
	      v.quick_push (cvt);
	    }
	  return v.build ();
	}
    }
  /* No constant folding possible for this conversion.  */
  return NULL_TREE;
}

2215 | |

2216 | /* Construct a vector of zero elements of vector type TYPE. */ |

2217 | |

2218 | static tree |

2219 | build_zero_vector (tree type) |

2220 | { |

2221 | tree t; |

2222 | |

2223 | t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); |

2224 | return build_vector_from_val (type, t); |

2225 | } |

2226 | |

2227 | /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */ |

2228 | |

2229 | bool |

2230 | fold_convertible_p (const_tree type, const_tree arg) |

2231 | { |

2232 | tree orig = TREE_TYPE (arg); |

2233 | |

2234 | if (type == orig) |

2235 | return true; |

2236 | |

2237 | if (TREE_CODE (arg) == ERROR_MARK |

2238 | || TREE_CODE (type) == ERROR_MARK |

2239 | || TREE_CODE (orig) == ERROR_MARK) |

2240 | return false; |

2241 | |

2242 | if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) |

2243 | return true; |

2244 | |

2245 | switch (TREE_CODE (type)) |

2246 | { |

2247 | case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: |

2248 | case POINTER_TYPE: case REFERENCE_TYPE: |

2249 | case OFFSET_TYPE: |

2250 | return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) |

2251 | || TREE_CODE (orig) == OFFSET_TYPE); |

2252 | |

2253 | case REAL_TYPE: |

2254 | case FIXED_POINT_TYPE: |

2255 | case VECTOR_TYPE: |

2256 | case VOID_TYPE: |

2257 | return TREE_CODE (type) == TREE_CODE (orig); |

2258 | |

2259 | default: |

2260 | return false; |

2261 | } |

2262 | } |

2263 | |

2264 | /* Convert expression ARG to type TYPE. Used by the middle-end for |

2265 | simple conversions in preference to calling the front-end's convert. */ |

2266 | |

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  /* Same type: nothing to do.  */
  if (type == orig)
    return arg;

  /* Never build conversions of erroneous trees.  */
  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  /* Dispatch on the category of the destination TYPE.  */
  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* Constants fold directly when possible.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      /* Complex sources convert via their real part.  */
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      /* The only remaining valid source is an equal-sized vector,
	 handled as a bit reinterpretation.  */
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case REAL_TYPE:
      /* Try constant folding first, with the conversion code matching
	 the source constant's kind.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  /* Convert via the real part.  */
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  /* Convert via the real part.  */
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  /* Scalar to complex: the scalar is the real part, zero the
	     imaginary part.  */
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_convert_loc (loc, TREE_TYPE (type), arg),
			      fold_convert_loc (loc, TREE_TYPE (type),
					    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    /* An explicit COMPLEX_EXPR can have its parts converted
	       independently without evaluating ARG twice.  */
	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* Otherwise wrap ARG in a SAVE_EXPR so extracting both
	       parts evaluates it only once.  */
	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      /* Keep only ARG's side effects, discarding its value.  */
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  /* Shared exit for folded constants: pin LOC onto the result.  */
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}

2436 | |

2437 | /* Return false if expr can be assumed not to be an lvalue, true |

2438 | otherwise. */ |

2439 | |

2440 | static bool |

2441 | maybe_lvalue_p (const_tree x) |

2442 | { |

2443 | /* We only need to wrap lvalue tree codes. */ |

2444 | switch (TREE_CODE (x)) |

2445 | { |

2446 | case VAR_DECL: |

2447 | case PARM_DECL: |

2448 | case RESULT_DECL: |

2449 | case LABEL_DECL: |

2450 | case FUNCTION_DECL: |

2451 | case SSA_NAME: |

2452 | |

2453 | case COMPONENT_REF: |

2454 | case MEM_REF: |

2455 | case INDIRECT_REF: |

2456 | case ARRAY_REF: |

2457 | case ARRAY_RANGE_REF: |

2458 | case BIT_FIELD_REF: |

2459 | case OBJ_TYPE_REF: |

2460 | |

2461 | case REALPART_EXPR: |

2462 | case IMAGPART_EXPR: |

2463 | case PREINCREMENT_EXPR: |

2464 | case PREDECREMENT_EXPR: |

2465 | case SAVE_EXPR: |

2466 | case TRY_CATCH_EXPR: |

2467 | case WITH_CLEANUP_EXPR: |

2468 | case COMPOUND_EXPR: |

2469 | case MODIFY_EXPR: |

2470 | case TARGET_EXPR: |

2471 | case COND_EXPR: |

2472 | case BIND_EXPR: |

2473 | break; |

2474 | |

2475 | default: |

2476 | /* Assume the worst for front-end tree codes. */ |

2477 | if ((int)TREE_CODE (x) >= NUM_TREE_CODES) |

2478 | break; |

2479 | return false; |

2480 | } |

2481 | |

2482 | return true; |

2483 | } |

2484 | |

2485 | /* Return an expr equal to X but certainly not valid as an lvalue. */ |

2486 | |

2487 | tree |

2488 | non_lvalue_loc (location_t loc, tree x) |

2489 | { |

2490 | /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to |

2491 | us. */ |

2492 | if (in_gimple_form) |

2493 | return x; |

2494 | |

2495 | if (! maybe_lvalue_p (x)) |

2496 | return x; |

2497 | return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x); |

2498 | } |

2499 | |

2500 | /* When pedantic, return an expr equal to X but certainly not valid as a |

2501 | pedantic lvalue. Otherwise, return X. */ |

2502 | |

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  /* X is returned with location LOC set on it (unsharing the node
     first when necessary); no NON_LVALUE_EXPR wrapper is built.  */
  return protected_set_expr_location_unshare (x, loc);
}

2508 | |

2509 | /* Given a tree comparison code, return the code that is the logical inverse. |

2510 | It is generally not safe to do this for floating-point comparisons, except |

2511 | for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return |

2512 | ERROR_MARK in this case. */ |

2513 | |

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  /* With trapping math, inverting between ordered and unordered
     comparisons would change which operand pairs trap, so refuse all
     but the trap-neutral codes.  */
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    /* When NaNs matter, the ordered relations invert to their
       unordered counterparts, since e.g. !(a > b) includes the
       unordered case.  */
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

2555 | |

2556 | /* Similar, but return the comparison that results if the operands are |

2557 | swapped. This is safe for floating-point. */ |

2558 | |

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    /* Symmetric codes are their own swap.  */
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    /* Each asymmetric code maps to its mirror image.  */
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

2591 | |

2592 | |

2593 | /* Convert a comparison tree code from an enum tree_code representation |

2594 | into a compcode bit-based encoding. This function is the inverse of |

2595 | compcode_to_comparison. */ |

2596 | |

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  /* A one-to-one table from tree comparison codes to the COMPCODE_*
     bit-based encoding; any non-comparison code is a caller bug.  */
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

2634 | |

2635 | /* Convert a compcode bit-based encoding of a comparison operator back |

2636 | to GCC's enum tree_code representation. This function is the |

2637 | inverse of comparison_to_compcode. */ |

2638 | |

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  /* Exact inverse of comparison_to_compcode; COMPCODE_TRUE and
     COMPCODE_FALSE have no tree equivalent and must not reach here.  */
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

2676 | |

2677 | /* Return a tree for the comparison which is the combination of |

2678 | doing the AND or OR (depending on CODE) of the two operations LCODE |

2679 | and RCODE on the identical operands LL_ARG and LR_ARG. Take into account |

2680 | the possibility of trapping if the mode has NaNs, and return NULL_TREE |

2681 | if this makes the transformation invalid. */ |

2682 | |

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (ll_arg);
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  /* In the bit-based encoding, AND/OR of two comparisons on the same
     operands is just AND/OR of their compcodes.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  A comparison traps unless it is
	 unordered-tolerant, equality, or ORDERED.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* Tautologies and contradictions fold to boolean constants; any
     other combined code maps back to a single comparison.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}

2765 | |

2766 | /* Return nonzero if two operands (typically of the same tree node) |

2767 | are necessarily equal. FLAGS modifies behavior as follows: |

2768 | |

2769 | If OEP_ONLY_CONST is set, only return nonzero for constants. |

2770 | This function tests whether the operands are indistinguishable; |

2771 | it does not test whether they are equal using C's == operation. |

2772 | The distinction is important for IEEE floating point, because |

2773 | (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and |

2774 | (2) two NaNs may be indistinguishable, but NaN!=NaN. |

2775 | |

2776 | If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself |

2777 | even though it may hold multiple values during a function. |

2778 | This is because a GCC tree node guarantees that nothing else is |

2779 | executed between the evaluation of its "operands" (which may often |

2780 | be evaluated in arbitrary order). Hence if the operands themselves |

2781 | don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the |

2782 | same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST |

2783 | unset means assuming isochronic (or instantaneous) tree equivalence. |

2784 | Unless comparing arbitrary expression trees, such as from different |

2785 | statements, this flag can usually be left unset. |

2786 | |

2787 | If OEP_PURE_SAME is set, then pure functions with identical arguments |

2788 | are considered the same. It is used when the caller has other ways |

2789 | to ensure that global memory is unchanged in between. |

2790 | |

2791 | If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects, |

2792 | not values of expressions. |

2793 | |

2794 | If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects |

2795 | such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs. |

2796 | |

2797 | Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on |

2798 | any operand with side effect. This is unnecessarily conservative in the |

2799 | case we know that arg0 and arg1 are in disjoint code paths (such as in |

2800 | ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing |

2801 | addresses with TREE_CONSTANT flag set so we know that &var == &var |

2802 | even if var is volatile. */ |

2803 | |

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* When checking, verify at the outermost operand_equal_p call that
     if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
     hash value.  */
  if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
    {
      if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
	{
	  if (arg0 != arg1)
	    {
	      inchash::hash hstate0 (0), hstate1 (0);
	      inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
	      inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
	      hashval_t h0 = hstate0.end ();
	      hashval_t h1 = hstate1.end ();
	      gcc_assert (h0 == h1);
	    }
	  return 1;
	}
      else
	return 0;
    }

  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Address of INTEGER_CST is not defined; check that we did not forget
	 to drop the OEP_ADDRESS_OF flags.  */
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      return tree_int_cst_equal (arg0, arg1);
    }

  if (!(flags & OEP_ADDRESS_OF))
    {
      /* If both types don't have the same signedness, then we can't consider
	 them equal.  We must check this before the STRIP_NOPS calls
	 because they may change the signedness of the arguments.  As pointers
	 strictly don't have a signedness, require either two pointers or
	 two non-pointers as well.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
	  || POINTER_TYPE_P (TREE_TYPE (arg0))
	     != POINTER_TYPE_P (TREE_TYPE (arg1)))
	return 0;

      /* If both types don't have the same precision, then it is not safe
	 to strip NOPs.  */
      if (element_precision (TREE_TYPE (arg0))
	  != element_precision (TREE_TYPE (arg1)))
	return 0;

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
#if 0
  /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR.  Enable the
     sanity check once the issue is solved.  */
  else
    /* Addresses of conversions and SSA_NAMEs (and many other things)
       are not defined.  Check that we did not forget to drop the
       OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
    gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
			 && TREE_CODE (arg0) != SSA_NAME);
#endif

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1))
    {
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
	;
      else if (flags & OEP_ADDRESS_OF)
	{
	  /* If we are interested in comparing addresses ignore
	     MEM_REF wrappings of the base that can appear just for
	     TBAA reasons.  */
	  if (TREE_CODE (arg0) == MEM_REF
	      && DECL_P (arg1)
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
	      && integer_zerop (TREE_OPERAND (arg0, 1)))
	    return 1;
	  else if (TREE_CODE (arg1) == MEM_REF
		   && DECL_P (arg0)
		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
		   && integer_zerop (TREE_OPERAND (arg1, 1)))
	    return 1;
	  return 0;
	}
      else
	return 0;
    }

  /* When not checking addresses, this is needed for conversions and for
     COMPONENT_REF.  Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
	  && !(flags & OEP_ADDRESS_OF)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_MATCH_SIDE_EFFECTS)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
	  return 1;


	if (!HONOR_SIGNED_ZEROS (arg0))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  /* The two vectors are equal when their stepped encodings agree:
	     same number of patterns, same elements per pattern, and
	     pairwise-equal encoded elements.  */
	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
	    return 0;

	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
	    return 0;

	  unsigned int count = vector_cst_encoded_nelts (arg0);
	  for (unsigned int i = 0; i < count; ++i)
	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
	      return 0;
	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				flags | OEP_ADDRESS_OF
				| OEP_MATCH_SIDE_EFFECTS);
      case CONSTRUCTOR:
	/* In GIMPLE empty constructors are allowed in initializers of
	   aggregates.  */
	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
      default:
	break;
      }

  /* Past the constant cases nothing below can satisfy OEP_ONLY_CONST.  */
  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  if (!(flags & OEP_ADDRESS_OF)
	      && (TYPE_ALIGN (TREE_TYPE (arg0))
		  != TYPE_ALIGN (TREE_TYPE (arg1))))
	    return 0;
	  /* The dereferenced pointers are values, not addresses.  */
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (0);

	case IMAGPART_EXPR:
	  /* Require the same offset.  */
	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
				TYPE_SIZE (TREE_TYPE (arg1)),
				flags & ~OEP_ADDRESS_OF))
	    return 0;

	/* Fallthru.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	case MEM_REF:
	  if (!(flags & OEP_ADDRESS_OF))
	    {
	      /* Require equal access sizes */
	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
		  && (!TYPE_SIZE (TREE_TYPE (arg0))
		      || !TYPE_SIZE (TREE_TYPE (arg1))
		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		return 0;
	      /* Verify that access happens in similar types.  */
	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
		return 0;
	      /* Verify that accesses are TBAA compatible.  */
	      if (!alias_ptr_types_compatible_p
		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
		  || (MR_DEPENDENCE_CLIQUE (arg0)
		      != MR_DEPENDENCE_CLIQUE (arg1))
		  || (MR_DEPENDENCE_BASE (arg0)
		      != MR_DEPENDENCE_BASE (arg1)))
		return 0;
	      /* Verify that alignment is compatible.  */
	      if (TYPE_ALIGN (TREE_TYPE (arg0))
		  != TYPE_ALIGN (TREE_TYPE (arg1)))
		return 0;
	    }
	  flags &= ~OEP_ADDRESS_OF;
	  return (OP_SAME (0) && OP_SAME (1)
		  /* TARGET_MEM_REF require equal extra operands.  */
		  && (TREE_CODE (arg0) != TARGET_MEM_REF
		      || (OP_SAME_WITH_NULL (2)
			  && OP_SAME_WITH_NULL (3)
			  && OP_SAME_WITH_NULL (4))));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  /* Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3)
		  /* Compare low bound and element size as with OEP_ADDRESS_OF
		     we have to account for the offset of the ref.  */
		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
		      || (operand_equal_p (array_ref_low_bound
					     (CONST_CAST_TREE (arg0)),
					   array_ref_low_bound
					     (CONST_CAST_TREE (arg1)), flags)
			  && operand_equal_p (array_ref_element_size
						(CONST_CAST_TREE (arg0)),
					      array_ref_element_size
						(CONST_CAST_TREE (arg1)),
					      flags))));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0)
	      || !OP_SAME (1))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  /* Operands 1 and 2 are the bit size and bit position.  */
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	  /* Be sure we pass right ADDRESS_OF flag.  */
	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0),
				  flags | OEP_ADDRESS_OF);

	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (0);

	case BIT_INSERT_EXPR:
	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
	     of op1.  Need to check to make sure they are the same.  */
	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
	    return false;
	  /* FALLTHRU */

	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	case MODIFY_EXPR:
	case INIT_EXPR:
	case COMPOUND_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME (0) && OP_SAME (1);
	  return 0;

	case CLEANUP_POINT_EXPR:
	case EXPR_STMT:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME (0);
	  return 0;

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
	    /* If not both CALL_EXPRs are either internal or normal
	       functions, then they are not equal.  */
	    return 0;
	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
	    {
	      /* If the CALL_EXPRs call different internal functions, then they
		 are not equal.  */
	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
		return 0;
	    }
	  else
	    {
	      /* If the CALL_EXPRs call different functions, then they are not
		 equal.  */
	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				     flags))
		return 0;
	    }

	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    case tcc_exceptional:
      if (TREE_CODE (arg0) == CONSTRUCTOR)
	{
	  /* In GIMPLE constructors are used only to build vectors from
	     elements.  Individual elements in the constructor must be
	     indexed in increasing order and form an initial sequence.

	     We make no effort to compare constructors in generic.
	     (see sem_variable::equals in ipa-icf which can do so for
	     constants).  */
	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
	    return 0;

	  /* Be sure that vectors constructed have the same representation.
	     We only tested element precision and modes to match.
	     Vectors may be BLKmode and thus also check that the number of
	     parts match.  */
	  if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
	      != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
	    return 0;

	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
	  unsigned int len = vec_safe_length (v0);

	  if (len != vec_safe_length (v1))
	    return 0;

	  for (unsigned int i = 0; i < len; i++)
	    {
	      constructor_elt *c0 = &(*v0)[i];
	      constructor_elt *c1 = &(*v1)[i];

	      if (!operand_equal_p (c0->value, c1->value, flags)
		  /* In GIMPLE the indexes can be either NULL or matching i.
		     Double check this so we won't get false
		     positives for GENERIC.  */
		  || (c0->index
		      && (TREE_CODE (c0->index) != INTEGER_CST
			  || !compare_tree_int (c0->index, i)))
		  || (c1->index
		      && (TREE_CODE (c1->index) != INTEGER_CST
			  || !compare_tree_int (c1->index, i))))
		return 0;
	    }
	  return 1;
	}
      else if (TREE_CODE (arg0) == STATEMENT_LIST
	       && (flags & OEP_LEXICOGRAPHIC))
	{
	  /* Compare the STATEMENT_LISTs.  */
	  tree_stmt_iterator tsi1, tsi2;
	  tree body1 = CONST_CAST_TREE (arg0);
	  tree body2 = CONST_CAST_TREE (arg1);
	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
	       tsi_next (&tsi1), tsi_next (&tsi2))
	    {
	      /* The lists don't have the same number of statements.  */
	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
		return 0;
	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
		return 1;
	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
				    OEP_LEXICOGRAPHIC))
		return 0;
	    }
	}
      return 0;

    case tcc_statement:
      switch (TREE_CODE (arg0))
	{
	case RETURN_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME_WITH_NULL (0);
	  return 0;
	default:
	  return 0;
	}

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}

3412 | |

3413 | /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1 |

3414 | with a different signedness or a narrower precision. */ |

3415 | |

3416 | static bool |

3417 | operand_equal_for_comparison_p (tree arg0, tree arg1) |

3418 | { |

3419 | if (operand_equal_p (arg0, arg1, 0)) |

3420 | return true; |

3421 | |

3422 | if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)) |

3423 | || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1))) |

3424 | return false; |

3425 | |

3426 | /* Discard any conversions that don't change the modes of ARG0 and ARG1 |

3427 | and see if the inner values are the same. This removes any |

3428 | signedness comparison, which doesn't matter here. */ |

3429 | tree op0 = arg0; |

3430 | tree op1 = arg1; |

3431 | STRIP_NOPS (op0); |

3432 | STRIP_NOPS (op1); |

3433 | if (operand_equal_p (op0, op1, 0)) |

3434 | return true; |

3435 | |

3436 | /* Discard a single widening conversion from ARG1 and see if the inner |

3437 | value is the same as ARG0. */ |

3438 | if (CONVERT_EXPR_P (arg1) |

3439 | && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))) |

3440 | && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))) |

3441 | < TYPE_PRECISION (TREE_TYPE (arg1)) |

3442 | && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) |

3443 | return true; |

3444 | |

3445 | return false; |

3446 | } |

3447 | |

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record operand 0 in the first free slot, or accept it if it
	 matches an already-recorded value.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for operand 1; *CVAL1 is known non-null here.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}

3544 | |

3545 | /* ARG is a tree that is known to contain just arithmetic operations and |

3546 | comparisons. Evaluate the operations in the tree substituting NEW0 for |

3547 | any occurrence of OLD0 as an operand of a comparison and likewise for |

3548 | NEW1 and OLD1. */ |

3549 | |

3550 | static tree |

3551 | eval_subst (location_t loc, tree arg, tree old0, tree new0, |

3552 | tree old1, tree new1) |

3553 | { |

3554 | tree type = TREE_TYPE (arg); |

3555 | enum tree_code code = TREE_CODE (arg); |

3556 | enum tree_code_class tclass = TREE_CODE_CLASS (code); |

3557 | |

3558 | /* We can handle some of the tcc_expression cases here. */ |

3559 | if (tclass == tcc_expression && code == TRUTH_NOT_EXPR) |

3560 | tclass = tcc_unary; |

3561 | else if (tclass == tcc_expression |

3562 | && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)) |

3563 | tclass = tcc_binary; |

3564 | |

3565 | switch (tclass) |

3566 | { |

3567 | case tcc_unary: |

3568 | return fold_build1_loc (loc, code, type, |

3569 | eval_subst (loc, TREE_OPERAND (arg, 0), |

3570 | old0, new0, old1, new1)); |

3571 | |

3572 | case tcc_binary: |

3573 | return fold_build2_loc (loc, code, type, |

3574 | eval_subst (loc, TREE_OPERAND (arg, 0), |

3575 | old0, new0, old1, new1), |

3576 | eval_subst (loc, TREE_OPERAND (arg, 1), |

3577 | old0, new0, old1, new1)); |

3578 | |

3579 | case tcc_expression: |

3580 | switch (code) |

3581 | { |

3582 | case SAVE_EXPR: |

3583 | return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0, |

3584 | old1, new1); |

3585 | |

3586 | case COMPOUND_EXPR: |

3587 | return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0, |

3588 | old1, new1); |

3589 | |

3590 | case COND_EXPR: |

3591 | return fold_build3_loc (loc, code, type, |

3592 | eval_subst (loc, TREE_OPERAND (arg, 0), |

3593 | old0, new0, old1, new1), |

3594 | eval_subst (loc, TREE_OPERAND (arg, 1), |

3595 | old0, new0, old1, new1), |

3596 | eval_subst (loc, TREE_OPERAND (arg, 2), |

3597 | old0, new0, old1, new1)); |

3598 | default: |

3599 | break; |

3600 | } |

3601 | /* Fall through - ??? */ |

3602 | |

3603 | case tcc_comparison: |

3604 | { |

3605 | tree arg0 = TREE_OPERAND (arg, 0); |

3606 | tree arg1 = TREE_OPERAND (arg, 1); |

3607 | |

3608 | /* We need to check both for exact equality and tree equality. The |

3609 | former will be true if the operand has a side-effect. In that |

3610 | case, we know the operand occurred exactly once. */ |

3611 | |

3612 | if (arg0 == old0 || operand_equal_p (arg0, old0, 0)) |

3613 | arg0 = new0; |

3614 | else if (arg0 == old1 || operand_equal_p (arg0, old1, 0)) |

3615 | arg0 = new1; |

3616 | |

3617 | if (arg1 == old0 || operand_equal_p (arg1, old0, 0)) |

3618 | arg1 = new0; |

3619 | else if (arg1 == old1 || operand_equal_p (arg1, old1, 0)) |

3620 | arg1 = new1; |

3621 | |

3622 | return fold_build2_loc (loc, code, type, arg0, arg1); |

3623 | } |

3624 | |

3625 | default: |

3626 | return arg; |

3627 | } |

3628 | } |

3629 | |

3630 | /* Return a tree for the case when the result of an expression is RESULT |

3631 | converted to TYPE and OMITTED was previously an operand of the expression |

3632 | but is now not needed (e.g., we folded OMITTED * 0). |

3633 | |

3634 | If OMITTED has side effects, we must evaluate it. Otherwise, just do |

3635 | the conversion of RESULT to TYPE. */ |

3636 | |

3637 | tree |

3638 | omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted) |

3639 | { |

3640 | tree t = fold_convert_loc (loc, type, result); |

3641 | |

3642 | /* If the resulting operand is an empty statement, just return the omitted |

3643 | statement casted to void. */ |

3644 | if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted)) |

3645 | return build1_loc (loc, NOP_EXPR, void_type_node, |

3646 | fold_ignored_result (omitted)); |

3647 | |

3648 | if (TREE_SIDE_EFFECTS (omitted)) |

3649 | return build2_loc (loc, COMPOUND_EXPR, type, |

3650 | fold_ignored_result (omitted), t); |

3651 | |

3652 | return non_lvalue_loc (loc, t); |

3653 | } |

3654 | |

3655 | /* Return a tree for the case when the result of an expression is RESULT |

3656 | converted to TYPE and OMITTED1 and OMITTED2 were previously operands |

3657 | of the expression but are now not needed. |

3658 | |

3659 | If OMITTED1 or OMITTED2 has side effects, they must be evaluated. |

3660 | If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is |

3661 | evaluated before OMITTED2. Otherwise, if neither has side effects, |

3662 | just do the conversion of RESULT to TYPE. */ |

3663 | |

3664 | tree |

3665 | omit_two_operands_loc (location_t loc, tree type, tree result, |

3666 | tree omitted1, tree omitted2) |

3667 | { |

3668 | tree t = fold_convert_loc (loc, type, result); |

3669 | |

3670 | if (TREE_SIDE_EFFECTS (omitted2)) |

3671 | t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t); |

3672 | if (TREE_SIDE_EFFECTS (omitted1)) |

3673 | t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t); |

3674 | |

3675 | return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t; |

3676 | } |

3677 | |

3678 | |

3679 | /* Return a simplified tree node for the truth-negation of ARG. This |

3680 | never alters ARG itself. We assume that ARG is an operation that |

3681 | returns a truth value (0 or 1). |

3682 | |

3683 | FIXME: one would think we would fold the result, but it causes |

3684 | problems with the dominator optimizer. */ |

3685 | |

3686 | static tree |

3687 | fold_truth_not_expr (location_t loc, tree arg) |

3688 | { |

3689 | tree type = TREE_TYPE (arg); |

3690 | enum tree_code code = TREE_CODE (arg); |

3691 | location_t loc1, loc2; |

3692 | |

3693 | /* If this is a comparison, we can simply invert it, except for |

3694 | floating-point non-equality comparisons, in which case we just |

3695 | enclose a TRUTH_NOT_EXPR around what we have. */ |

3696 | |

3697 | if (TREE_CODE_CLASS (code) == tcc_comparison) |

3698 | { |

3699 | tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0)); |

3700 | if (FLOAT_TYPE_P (op_type) |

3701 | && flag_trapping_math |

3702 | && code != ORDERED_EXPR && code != UNORDERED_EXPR |

3703 | && code != NE_EXPR && code != EQ_EXPR) |

3704 | return NULL_TREE; |

3705 | |

3706 | code = invert_tree_comparison (code, HONOR_NANS (op_type)); |

3707 | if (code == ERROR_MARK) |

3708 | return NULL_TREE; |

3709 | |

3710 | tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0), |

3711 | TREE_OPERAND (arg, 1)); |

3712 | if (TREE_NO_WARNING (arg)) |

3713 | TREE_NO_WARNING (ret) = 1; |

3714 | return ret; |

3715 | } |

3716 | |

3717 | switch (code) |

3718 | { |

3719 | case INTEGER_CST: |

3720 | return constant_boolean_node (integer_zerop (arg), type); |

3721 | |

3722 | case TRUTH_AND_EXPR: |

3723 | loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |

3724 | loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); |

3725 | return build2_loc (loc, TRUTH_OR_EXPR, type, |

3726 | invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), |

3727 | invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); |

3728 | |

3729 | case TRUTH_OR_EXPR: |

3730 | loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |

3731 | loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); |

3732 | return build2_loc (loc, TRUTH_AND_EXPR, type, |

3733 | invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), |

3734 | invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); |

3735 | |

3736 | case TRUTH_XOR_EXPR: |

3737 | /* Here we can invert either operand. We invert the first operand |

3738 | unless the second operand is a TRUTH_NOT_EXPR in which case our |

3739 | result is the XOR of the first operand with the inside of the |

3740 | negation of the second operand. */ |

3741 | |

3742 | if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR) |

3743 | return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0), |

3744 | TREE_OPERAND (TREE_OPERAND (arg, 1), 0)); |

3745 | else |

3746 | return build2_loc (loc, TRUTH_XOR_EXPR, type, |

3747 | invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)), |

3748 | TREE_OPERAND (arg, 1)); |

3749 | |

3750 | case TRUTH_ANDIF_EXPR: |

3751 | loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |

3752 | loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); |

3753 | return build2_loc (loc, TRUTH_ORIF_EXPR, type, |

3754 | invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), |

3755 | invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); |

3756 | |

3757 | case TRUTH_ORIF_EXPR: |

3758 | loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |

3759 | loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); |

3760 | return build2_loc (loc, TRUTH_ANDIF_EXPR, type, |

3761 | invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), |

3762 | invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); |

3763 | |

3764 | case TRUTH_NOT_EXPR: |

3765 | return TREE_OPERAND (arg, 0); |

3766 | |

3767 | case COND_EXPR: |

3768 | { |

3769 | tree arg1 = TREE_OPERAND (arg, 1); |

3770 | tree arg2 = TREE_OPERAND (arg, 2); |

3771 | |

3772 | loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc); |

3773 | loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc); |

3774 | |

3775 | /* A COND_EXPR may have a throw as one operand, which |

3776 | then has void type. Just leave void operands |

3777 | as they are. */ |

3778 | return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0), |

3779 | VOID_TYPE_P (TREE_TYPE (arg1)) |

3780 | ? arg1 : invert_truthvalue_loc (loc1, arg1), |

3781 | VOID_TYPE_P (TREE_TYPE (arg2)) |

3782 | ? arg2 : invert_truthvalue_loc (loc2, arg2)); |

3783 | } |

3784 | |

3785 | case COMPOUND_EXPR: |

3786 | loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc); |

3787 | return build2_loc (loc, COMPOUND_EXPR, type, |

3788 | TREE_OPERAND (arg, 0), |

3789 | invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1))); |

3790 | |

3791 | case NON_LVALUE_EXPR: |

3792 | loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |

3793 | return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)); |

3794 | |

3795 | CASE_CONVERT: |

3796 | if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE) |

3797 | return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); |

3798 | |

3799 | /* fall through */ |

3800 | |

3801 | case FLOAT_EXPR: |

3802 | loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |

3803 | return build1_loc (loc, TREE_CODE (arg), type, |

3804 | invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); |

3805 | |

3806 | case BIT_AND_EXPR: |

3807 | if (!integer_onep (TREE_OPERAND (arg, 1))) |

3808 | return NULL_TREE; |

3809 | return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0)); |

3810 | |

3811 | case SAVE_EXPR: |

3812 | return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); |

3813 | |

3814 | case CLEANUP_POINT_EXPR: |

3815 | loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |

3816 | return build1_loc (loc, CLEANUP_POINT_EXPR, type, |

3817 | invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); |

3818 | |

3819 | default: |

3820 | return NULL_TREE; |

3821 | } |

3822 | } |

3823 | |

3824 | /* Fold the truth-negation of ARG. This never alters ARG itself. We |

3825 | assume that ARG is an operation that returns a truth value (0 or 1 |

3826 | for scalars, 0 or -1 for vectors). Return the folded expression if |

3827 | folding is successful. Otherwise, return NULL_TREE. */ |

3828 | |

3829 | static tree |

3830 | fold_invert_truthvalue (location_t loc, tree arg) |

3831 | { |

3832 | tree type = TREE_TYPE (arg); |

3833 | return fold_unary_loc (loc, VECTOR_TYPE_P (type) |

3834 | ? BIT_NOT_EXPR |

3835 | : TRUTH_NOT_EXPR, |

3836 | type, arg); |

3837 | } |

3838 | |

3839 | /* Return a simplified tree node for the truth-negation of ARG. This |

3840 | never alters ARG itself. We assume that ARG is an operation that |

3841 | returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */ |

3842 | |

3843 | tree |

3844 | invert_truthvalue_loc (location_t loc, tree arg) |

3845 | { |

3846 | if (TREE_CODE (arg) == ERROR_MARK) |

3847 | return arg; |

3848 | |

3849 | tree type = TREE_TYPE (arg); |

3850 | return fold_build1_loc (loc, VECTOR_TYPE_P (type) |

3851 | ? BIT_NOT_EXPR |

3852 | : TRUTH_NOT_EXPR, |

3853 | type, arg); |

3854 | } |

3855 | |

3856 | /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER |

3857 | starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero |

3858 | and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER |

3859 | is the original memory reference used to preserve the alias set of |

3860 | the access. */ |

3861 | |

3862 | static tree |

3863 | make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type, |

3864 | HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, |

3865 | int unsignedp, int reversep) |

3866 | { |

3867 | tree result, bftype; |

3868 | |

3869 | /* Attempt not to lose the access path if possible. */ |

3870 | if (TREE_CODE (orig_inner) == COMPONENT_REF) |

3871 | { |

3872 | tree ninner = TREE_OPERAND (orig_inner, 0); |

3873 | machine_mode nmode; |

3874 | HOST_WIDE_INT nbitsize, nbitpos; |

3875 | tree noffset; |

3876 | int nunsignedp, nreversep, nvolatilep = 0; |

3877 | tree base = get_inner_reference (ninner, &nbitsize, &nbitpos, |

3878 | &noffset, &nmode, &nunsignedp, |

3879 | &nreversep, &nvolatilep); |

3880 | if (base == inner |

3881 | && noffset == NULL_TREE |

3882 | && nbitsize >= bitsize |

3883 | && nbitpos <= bitpos |

3884 | && bitpos + bitsize <= nbitpos + nbitsize |

3885 | && !reversep |

3886 | && !nreversep |

3887 | && !nvolatilep) |

3888 | { |

3889 | inner = ninner; |

3890 | bitpos -= nbitpos; |

3891 | } |

3892 | } |

3893 | |

3894 | alias_set_type iset = get_alias_set (orig_inner); |

3895 | if (iset == 0 && get_alias_set (inner) != iset) |

3896 | inner = fold_build2 (MEM_REF, TREE_TYPE (inner), |

3897 | build_fold_addr_expr (inner), |

3898 | build_int_cst (ptr_type_node, 0)); |

3899 | |

3900 | if (bitpos == 0 && !reversep) |

3901 | { |

3902 | tree size = TYPE_SIZE (TREE_TYPE (inner)); |

3903 | if ((INTEGRAL_TYPE_P (TREE_TYPE (inner)) |

3904 | || POINTER_TYPE_P (TREE_TYPE (inner))) |

3905 | && tree_fits_shwi_p (size) |

3906 | && tree_to_shwi (size) == bitsize) |

3907 | return fold_convert_loc (loc, type, inner); |

3908 | } |

3909 | |

3910 | bftype = type; |

3911 | if (TYPE_PRECISION (bftype) != bitsize |

3912 | || TYPE_UNSIGNED (bftype) == !unsignedp) |

3913 | bftype = build_nonstandard_integer_type (bitsize, 0); |

3914 | |

3915 | result = build3_loc (loc, BIT_FIELD_REF, bftype, inner, |

3916 | bitsize_int (bitsize), bitsize_int (bitpos)); |

3917 | REF_REVERSE_STORAGE_ORDER (result) = reversep; |

3918 | |

3919 | if (bftype != type) |

3920 | result = fold_convert_loc (loc, type, result); |

3921 | |

3922 | return result; |

3923 | } |

3924 | |

3925 | /* Optimize a bit-field compare. |

3926 | |

3927 | There are two cases: First is a compare against a constant and the |

3928 | second is a comparison of two items where the fields are at the same |

3929 | bit position relative to the start of a chunk (byte, halfword, word) |

3930 | large enough to contain it. In these cases we can avoid the shift |

3931 | implicit in bitfield extractions. |

3932 | |

3933 | For constants, we emit a compare of the shifted constant with the |

3934 | BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being |

3935 | compared. For two fields at the same position, we do the ANDs with the |

3936 | similar mask and compare the result of the ANDs. |

3937 | |

3938 | CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR. |

3939 | COMPARE_TYPE is the type of the comparison, and LHS and RHS |

3940 | are the left and right operands of the comparison, respectively. |

3941 | |

3942 | If the optimization described above can be done, we return the resulting |

3943 | tree. Otherwise we return zero. */ |

3944 | |

3945 | static tree |

3946 | optimize_bit_field_compare (location_t loc, enum tree_code code, |

3947 | tree compare_type, tree lhs, tree rhs) |

3948 | { |

3949 | HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize; |

3950 | tree type = TREE_TYPE (lhs); |

3951 | tree unsigned_type; |

3952 | int const_p = TREE_CODE (rhs) == INTEGER_CST; |

3953 | machine_mode lmode, rmode; |

3954 | scalar_int_mode nmode; |

3955 | int lunsignedp, runsignedp; |

3956 | int lreversep, rreversep; |

3957 | int lvolatilep = 0, rvolatilep = 0; |

3958 | tree linner, rinner = NULL_TREE; |

3959 | tree mask; |

3960 | tree offset; |

3961 | |

3962 | /* Get all the information about the extractions being done. If the bit size |

3963 | if the same as the size of the underlying object, we aren't doing an |

3964 | extraction at all and so can do nothing. We also don't want to |

3965 | do anything if the inner expression is a PLACEHOLDER_EXPR since we |

3966 | then will no longer be able to replace it. */ |

3967 | linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode, |

3968 | &lunsignedp, &lreversep, &lvolatilep); |

3969 | if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0 |

3970 | || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep) |

3971 | return 0; |

3972 | |

3973 | if ( |