/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "fold-const.h"
#include "stor-layout.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "langhooks.h"

static bool prefer_and_bit_test (scalar_int_mode, int);
static void do_jump_by_parts_greater (scalar_int_mode, tree, tree, int,
                                      rtx_code_label *, rtx_code_label *,
                                      profile_probability);
static void do_jump_by_parts_equality (scalar_int_mode, tree, tree,
                                       rtx_code_label *, rtx_code_label *,
                                       profile_probability);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code,
                                 rtx_code_label *, rtx_code_label *,
                                 profile_probability);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Remember pending_stack_adjust/stack_pointer_delta.
   To be used around code that may call do_pending_stack_adjust (),
   but the generated code could be discarded e.g. using delete_insns_since.  */

void
save_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  save->x_pending_stack_adjust = pending_stack_adjust;
  save->x_stack_pointer_delta = stack_pointer_delta;
}

/* Restore the saved pending_stack_adjust/stack_pointer_delta.  */

void
restore_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  if (inhibit_defer_pop == 0)
    {
      pending_stack_adjust = save->x_pending_stack_adjust;
      stack_pointer_delta = save->x_stack_pointer_delta;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.  */

void
jumpifnot (tree exp, rtx_code_label *label, profile_probability prob)
{
  do_jump (exp, label, NULL, prob.invert ());
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
             profile_probability prob)
{
  do_jump_1 (code, op0, op1, label, NULL, prob.invert ());
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx_code_label *label, profile_probability prob)
{
  do_jump (exp, NULL, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1,
          rtx_code_label *label, profile_probability prob)
{
  do_jump_1 (code, op0, op1, NULL, label, prob);
}

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */
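/* For example, with MODE == SImode and BITNUM == 3 this weighs the cost
   of the rtl for "x & 8" against that of "(x >> 3) & 1", each costed as
   the condition of a conditional branch (IF_THEN_ELSE).  */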

static bool
prefer_and_bit_test (scalar_int_mode mode, int bitnum)
{
  bool speed_p;
  wide_int mask = wi::set_bit_in_zero (bitnum, GET_MODE_PRECISION (mode));

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, LAST_VIRTUAL_REGISTER + 1);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = immed_wide_int_const (mask, mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, mode, IF_THEN_ELSE, 0, speed_p)
          <= rtx_cost (shift_test, mode, IF_THEN_ELSE, 0, speed_p));
}

/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1.  IF_FALSE_LABEL and IF_TRUE_LABEL are used as in do_jump.
   PROB is the probability of jumping to IF_TRUE_LABEL.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
           rtx_code_label *if_false_label, rtx_code_label *if_true_label,
           profile_probability prob)
{
  machine_mode mode;
  rtx_code_label *drop_through_label = 0;
  scalar_int_mode int_mode;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_true_label, if_false_label,
                   prob.invert ());
        else if (is_int_mode (TYPE_MODE (inner_type), &int_mode)
                 && !can_compare_p (EQ, int_mode, ccp_jump))
          do_jump_by_parts_equality (int_mode, op0, op1, if_false_label,
                                     if_true_label, prob);
        else
          do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
                               prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
          do_jump (op0, if_false_label, if_true_label, prob);
        else if (is_int_mode (TYPE_MODE (inner_type), &int_mode)
                 && !can_compare_p (NE, int_mode, ccp_jump))
          do_jump_by_parts_equality (int_mode, op0, op1, if_true_label,
                                     if_false_label, prob.invert ());
        else
          do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
                               prob);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (is_int_mode (mode, &int_mode)
          && ! can_compare_p (LT, int_mode, ccp_jump))
        do_jump_by_parts_greater (int_mode, op0, op1, 1, if_false_label,
                                  if_true_label, prob);
      else
        do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
                             prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (is_int_mode (mode, &int_mode)
          && ! can_compare_p (LE, int_mode, ccp_jump))
        do_jump_by_parts_greater (int_mode, op0, op1, 0, if_true_label,
                                  if_false_label, prob.invert ());
      else
        do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
                             prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (is_int_mode (mode, &int_mode)
          && ! can_compare_p (GT, int_mode, ccp_jump))
        do_jump_by_parts_greater (int_mode, op0, op1, 0, if_false_label,
                                  if_true_label, prob);
      else
        do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
                             prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (is_int_mode (mode, &int_mode)
          && ! can_compare_p (GE, int_mode, ccp_jump))
        do_jump_by_parts_greater (int_mode, op0, op1, 1, if_true_label,
                                  if_false_label, prob.invert ());
      else
        do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
                             prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
                           if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
                           prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
                           prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
                           prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
                           prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
                           prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
                           prob);
      break;

    case TRUTH_ANDIF_EXPR:
      {
        /* Spread the probability that the expression is false evenly between
           the two conditions.  So the first condition is false half the total
           probability of being false.  The second condition is false the other
           half of the total probability of being false, so its jump has a false
           probability of half the total, relative to the probability we
           reached it (i.e. the first condition was true).  */
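        /* For example, if PROB is 3/4 the false probability is 1/4 and
           each condition gets a false probability of 1/8: the first jump
           is taken with probability 1/8, the second with probability
           (1/8) / (7/8) = 1/7 relative to reaching it, so OP0_PROB is 7/8
           and OP1_PROB is 6/7, and 7/8 * 6/7 = 3/4 overall.  */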
        profile_probability op0_prob = profile_probability::uninitialized ();
        profile_probability op1_prob = profile_probability::uninitialized ();
        if (prob.initialized_p ())
          {
            profile_probability false_prob = prob.invert ();
            profile_probability op0_false_prob = false_prob.apply_scale (1, 2);
            profile_probability op1_false_prob = false_prob.apply_scale (1, 2)
              / op0_false_prob.invert ();
            /* Get the probability that each jump below is true.  */
            op0_prob = op0_false_prob.invert ();
            op1_prob = op1_false_prob.invert ();
          }
        if (if_false_label == NULL)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, drop_through_label, NULL, op0_prob);
            do_jump (op1, NULL, if_true_label, op1_prob);
          }
        else
          {
            do_jump (op0, if_false_label, NULL, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    case TRUTH_ORIF_EXPR:
      {
        /* Spread the probability evenly between the two conditions.  So
           the first condition has half the total probability of being true.
           The second condition has the other half of the total probability,
           so its jump has a probability of half the total, relative to
           the probability we reached it (i.e. the first condition was false).  */
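        /* For example, if PROB is 1/2 then OP0_PROB is 1/4 and OP1_PROB is
           (1/4) / (3/4) = 1/3; the second jump is reached with probability
           3/4, so the overall probability is 1/4 + 3/4 * 1/3 = 1/2.  */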
        profile_probability op0_prob = profile_probability::uninitialized ();
        profile_probability op1_prob = profile_probability::uninitialized ();
        if (prob.initialized_p ())
          {
            op0_prob = prob.apply_scale (1, 2);
            op1_prob = prob.apply_scale (1, 2) / op0_prob.invert ();
          }
        if (if_true_label == NULL)
          {
            drop_through_label = gen_label_rtx ();
            do_jump (op0, NULL, drop_through_label, op0_prob);
            do_jump (op1, if_false_label, NULL, op1_prob);
          }
        else
          {
            do_jump (op0, NULL, if_true_label, op0_prob);
            do_jump (op1, if_false_label, if_true_label, op1_prob);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is the probability of jumping to IF_TRUE_LABEL.  */

void
do_jump (tree exp, rtx_code_label *if_false_label,
         rtx_code_label *if_true_label, profile_probability prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  scalar_int_mode mode;
  rtx_code_label *drop_through_label = NULL;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      {
        rtx_code_label *lab = integer_zerop (exp) ? if_false_label
                                                  : if_true_label;
        if (lab)
          emit_jump (lab);
        break;
      }

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
      /* FALLTHRU */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
               prob.invert ());
      break;

    case COND_EXPR:
      {
        rtx_code_label *label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL,
                 profile_probability::uninitialized ());
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
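      /* For example, if the source tested "x & 4" and fold rewrote it as
         "(x >> 2) & 1", this rebuilds the "x & 4" form whenever
         prefer_and_bit_test reports that it is the cheaper of the two.  */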
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx_code_label *set_label, *clr_label;
          profile_probability setclr_prob = prob;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
              setclr_prob = prob.invert ();
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (SCALAR_INT_TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = HOST_WIDE_INT_1U << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cstu (argtype, mask)),
                           clr_label, set_label, setclr_prob);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
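      /* For example, "x & 0x80" with a 32-bit X can then be tested as an
         unsigned char (QImode) comparison against zero when the target has
         a compare at that width, effectively a sign-bit test of the low
         byte.  */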

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && int_mode_for_size (i + 1, 0).exists (&mode)
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label,
                   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */
      /* FALLTHRU */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
                       false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
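/* For a two-word value (e.g. DImode on a 32-bit target) this emits a GT
   test on the high words (signed or unsigned per UNSIGNEDP) that jumps to
   IF_TRUE_LABEL, an NE test that jumps to IF_FALSE_LABEL when the high
   words differ, and an unsigned GT test on the low words; anything that
   falls through is not greater.  */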

static void
do_jump_by_parts_greater_rtx (scalar_int_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx_code_label *if_false_label,
                              rtx_code_label *if_true_label,
                              profile_probability prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }

  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
      prob = prob.invert ();
    }

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
                               word_mode, NULL_RTX, NULL, if_true_label,
                               prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
        break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL, if_false_label,
                               prob.invert ());
    }

  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison of TREEOP0 and TREEOP1 for values too wide to be
   compared with one insn, test the comparison and jump to the appropriate
   label.  The tree code of the comparison is ignored; we always test GT
   if SWAP is 0, and LT if SWAP is 1.  MODE is the mode of the two
   operands.  */

static void
do_jump_by_parts_greater (scalar_int_mode mode, tree treeop0, tree treeop1,
                          int swap, rtx_code_label *if_false_label,
                          rtx_code_label *if_true_label,
                          profile_probability prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label, prob);
}

/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL to
   indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (scalar_int_mode mode, rtx op0,
                           rtx_code_label *if_false_label,
                           rtx_code_label *if_true_label,
                           profile_probability prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx_code_label *drop_through_label = NULL;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
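  /* For example, a four-word OP0 is reduced with three IORs to a single
     word that is zero exactly when every word of OP0 is zero.  */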

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    if_false_label = drop_through_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (scalar_int_mode mode, rtx op0, rtx op1,
                               rtx_code_label *if_false_label,
                               rtx_code_label *if_true_label,
                               profile_probability prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = NULL;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
                                 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
                                 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR comparison of TREEOP0 and TREEOP1 for values too wide
   to be compared with one insn, test the comparison and jump to the
   appropriate label.  MODE is the mode of the two operands.  */

static void
do_jump_by_parts_equality (scalar_int_mode mode, tree treeop0, tree treeop1,
                           rtx_code_label *if_false_label,
                           rtx_code_label *if_true_label,
                           profile_probability prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label, prob);
}

/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */
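/* For example, LT splits into ORDERED and UNLT, which must be ANDed,
   while UNGE splits into UNORDERED and GE, which must be ORed.  */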

bool
split_comparison (enum rtx_code code, machine_mode mode,
                  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
    case LT:
      *code1 = ORDERED;
      *code2 = UNLT;
      return true;
    case LE:
      *code1 = ORDERED;
      *code2 = UNLE;
      return true;
    case GT:
      *code1 = ORDERED;
      *code2 = UNGT;
      return true;
    case GE:
      *code1 = ORDERED;
      *code2 = UNGE;
      return true;
    case EQ:
      *code1 = ORDERED;
      *code2 = UNEQ;
      return true;
    case NE:
      *code1 = UNORDERED;
      *code2 = LTGT;
      return false;
    case UNLT:
      *code1 = UNORDERED;
      *code2 = LT;
      return false;
    case UNLE:
      *code1 = UNORDERED;
      *code2 = LE;
      return false;
    case UNGT:
      *code1 = UNORDERED;
      *code2 = GT;
      return false;
    case UNGE:
      *code1 = UNORDERED;
      *code2 = GE;
      return false;
    case UNEQ:
      *code1 = UNORDERED;
      *code2 = EQ;
      return false;
    case LTGT:
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
        {
          *code1 = LT;
          *code2 = GT;
          return false;
        }
      else
        {
          *code1 = ORDERED;
          *code2 = NE;
          return true;
        }
    default:
      gcc_unreachable ();
    }
}


/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         machine_mode mode, rtx size,
                         rtx_code_label *if_false_label,
                         rtx_code_label *if_true_label,
                         profile_probability prob)
{
  rtx tem;
  rtx_code_label *dummy_label = NULL;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
          || code == ORDERED || code == UNORDERED
          || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
          || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
          || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
        {
          std::swap (if_true_label, if_false_label);
          code = rcode;
          prob = prob.invert ();
        }
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx_code_label *label = (tem == const0_rtx
                                   || tem == CONST0_RTX (mode))
                                  ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  scalar_int_mode int_mode;
  if (is_int_mode (mode, &int_mode)
      && ! can_compare_p (code, int_mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (int_mode, 1, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (int_mode, 1, op0, op1,
                                        if_true_label, if_false_label,
                                        prob.invert ());
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (int_mode, 1, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (int_mode, 1, op1, op0,
                                        if_true_label, if_false_label,
                                        prob.invert ());
          break;

        case LT:
          do_jump_by_parts_greater_rtx (int_mode, 0, op1, op0,
                                        if_false_label, if_true_label, prob);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (int_mode, 0, op0, op1,
                                        if_true_label, if_false_label,
                                        prob.invert ());
          break;

        case GT:
          do_jump_by_parts_greater_rtx (int_mode, 0, op0, op1,
                                        if_false_label, if_true_label, prob);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (int_mode, 0, op1, op0,
                                        if_true_label, if_false_label,
                                        prob.invert ());
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (int_mode, op0, op1, if_false_label,
                                         if_true_label, prob);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (int_mode, op0, op1, if_true_label,
                                         if_false_label,
                                         prob.invert ());
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (SCALAR_FLOAT_MODE_P (mode)
          && ! can_compare_p (code, mode, ccp_jump)
          && can_compare_p (swap_condition (code), mode, ccp_jump))
        {
          code = swap_condition (code);
          std::swap (op0, op1);
        }
      else if (SCALAR_FLOAT_MODE_P (mode)
               && ! can_compare_p (code, mode, ccp_jump)
               /* Never split ORDERED and UNORDERED.
                  These must be implemented.  */
               && (code != ORDERED && code != UNORDERED)
               /* Split a floating-point comparison if
                  we can jump on other conditions...  */
               && (have_insn_for (COMPARE, mode)
                   /* ... or if there is no libcall for it.  */
                   || code_to_optab (code) == unknown_optab))
        {
          enum rtx_code first_code;
          bool and_them = split_comparison (code, mode, &first_code, &code);

          /* If there are no NaNs, the first comparison should always fall
             through.  */
          if (!HONOR_NANS (mode))
            gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));

          else
            {
              profile_probability first_prob = prob;
              if (first_code == UNORDERED)
                first_prob = profile_probability::guessed_always ().apply_scale
                                 (1, 100);
              else if (first_code == ORDERED)
                first_prob = profile_probability::guessed_always ().apply_scale
                                 (99, 100);
              if (and_them)
                {
                  rtx_code_label *dest_label;
                  /* If we only jump if true, just bypass the second jump.  */
                  if (! if_false_label)
                    {
                      if (! dummy_label)
                        dummy_label = gen_label_rtx ();
                      dest_label = dummy_label;
                    }
                  else
                    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp,
                                           mode, size, dest_label, NULL,
                                           first_prob);
                }
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
                                         size, NULL, if_true_label, first_prob);
            }
        }

      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                               if_true_label, prob);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}

/* Generate code for a comparison between TREEOP0 and TREEOP1 (including
   code to compute the values to be compared) and a conditional jump to
   IF_FALSE_LABEL and/or IF_TRUE_LABEL.  One of the labels can be NULL_RTX,
   in which case the generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
                     enum rtx_code unsigned_code,
                     rtx_code_label *if_false_label,
                     rtx_code_label *if_true_label, profile_probability prob)
{
  rtx op0, op1;
  tree type;
  machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type))
              > GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (treeop1))))))
    /* op0 might have been replaced by promoted constant, in which
       case the type of second argument should be used.  */
    type = TREE_TYPE (treeop1);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && POINTER_TYPE_P (TREE_TYPE (treeop0))
      && POINTER_TYPE_P (TREE_TYPE (treeop1))
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop1))))
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
                           if_false_label, if_true_label, prob);
}

#include "gt-dojump.h"