1/* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "backend.h"
25#include "rtl.h"
26#include "tree.h"
27#include "gimple.h"
28#include "alloc-pool.h"
29#include "ssa.h"
30#include "expmed.h"
31#include "insn-config.h"
32#include "memmodel.h"
33#include "emit-rtl.h"
34#include "cgraph.h"
35#include "gimple-pretty-print.h"
36#include "alias.h"
37#include "fold-const.h"
38#include "stor-layout.h"
39#include "cfganal.h"
40#include "tree-inline.h"
41#include "internal-fn.h"
42#include "gimple-fold.h"
43#include "tree-eh.h"
44#include "gimplify.h"
45#include "flags.h"
46#include "dojump.h"
47#include "explow.h"
48#include "calls.h"
49#include "varasm.h"
50#include "stmt.h"
51#include "expr.h"
52#include "tree-dfa.h"
53#include "tree-ssa.h"
54#include "dumpfile.h"
55#include "cfgloop.h"
56#include "params.h"
57#include "tree-ssa-propagate.h"
58#include "tree-cfg.h"
59#include "domwalk.h"
60#include "gimple-iterator.h"
61#include "gimple-match.h"
62#include "stringpool.h"
63#include "attribs.h"
64#include "tree-pass.h"
65#include "statistics.h"
66#include "langhooks.h"
67#include "ipa-utils.h"
68#include "dbgcnt.h"
69#include "tree-cfgcleanup.h"
70#include "tree-ssa-loop.h"
71#include "tree-scalar-evolution.h"
72#include "tree-ssa-sccvn.h"
73
74/* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight-line code, it is equivalent to a regular hash-based value
78 numbering performed in reverse postorder.
79
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
83
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
88
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
93
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
97
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
102
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
106
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does; a rough sketch of the walk follows this comment.
109
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
115
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
121
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
129*/
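/* An illustrative sketch of the SCC scheme described above (the names and
   structure are simplified and are not the actual implementation below):

     for each SSA name N in a DFS over the SSA use-def graph:
       pop the strongly connected component containing N off the stack;
       if the SCC has a single member:
         value-number it once using the valid tables;
       else:
         do
           value-number every member using the optimistic tables;
         while any value number changed;
         copy the final numbers into the valid tables;

   When an SCC is popped, all operands defined outside of it already
   have their final value numbers, as noted above.  */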
130
131
132static tree *last_vuse_ptr;
133static vn_lookup_kind vn_walk_kind;
134static vn_lookup_kind default_vn_walk_kind;
135bitmap const_parms;
136
137/* vn_nary_op hashtable helpers. */
138
139struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140{
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
144};
145
146/* Return the computed hashcode for nary operation P1. */
147
148inline hashval_t
149vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150{
151 return vno1->hashcode;
152}
153
154/* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
156
157inline bool
158vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159{
160 return vn_nary_op_eq (vno1, vno2);
161}
162
163typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
165
166
167/* vn_phi hashtable helpers. */
168
169static int
170vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171
172struct vn_phi_hasher : pointer_hash <vn_phi_s>
173{
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
177};
178
179/* Return the computed hashcode for phi operation P1. */
180
181inline hashval_t
182vn_phi_hasher::hash (const vn_phi_s *vp1)
183{
184 return vp1->hashcode;
185}
186
187/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188
189inline bool
190vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191{
192 return vn_phi_eq (vp1, vp2);
193}
194
195/* Free a phi operation structure VP. */
196
197inline void
198vn_phi_hasher::remove (vn_phi_s *phi)
199{
200 phi->phiargs.release ();
201}
202
203typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204typedef vn_phi_table_type::iterator vn_phi_iterator_type;
205
206
207/* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
209
210static int
211vn_reference_op_eq (const void *p1, const void *p2)
212{
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
215
216 return (vro1->opcode == vro2->opcode
217 /* We do not care about differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
225}
226
227/* Free a reference operation structure VP. */
228
229static inline void
230free_reference (vn_reference_s *vr)
231{
232 vr->operands.release ();
233}
234
235
236/* vn_reference hashtable helpers. */
237
238struct vn_reference_hasher : pointer_hash <vn_reference_s>
239{
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
243};
244
245/* Return the hashcode for a given reference operation P1. */
246
247inline hashval_t
248vn_reference_hasher::hash (const vn_reference_s *vr1)
249{
250 return vr1->hashcode;
251}
252
253inline bool
254vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
255{
256 return vn_reference_eq (v, c);
257}
258
259inline void
260vn_reference_hasher::remove (vn_reference_s *v)
261{
262 free_reference (v);
263}
264
265typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266typedef vn_reference_table_type::iterator vn_reference_iterator_type;
267
268
269/* The set of hashtables and alloc_pool's for their items. */
270
271typedef struct vn_tables_s
272{
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279} *vn_tables_t;
280
281
282/* vn_constant hashtable helpers. */
283
284struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
285{
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
288};
289
290/* Hash table hash function for vn_constant_t. */
291
292inline hashval_t
293vn_constant_hasher::hash (const vn_constant_s *vc1)
294{
295 return vc1->hashcode;
296}
297
298/* Hash table equality function for vn_constant_t. */
299
300inline bool
301vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
302{
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
305
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
307}
308
309static hash_table<vn_constant_hasher> *constant_to_value_id;
310static bitmap constant_value_ids;
311
312
313/* Valid hashtables storing information we have proven to be
314 correct. */
315
316static vn_tables_t valid_info;
317
318/* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
320
321static vn_tables_t optimistic_info;
322
323/* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
326
327static vn_tables_t current_info;
328
329
330/* Reverse post order index for each basic block. */
331
332static int *rpo_numbers;
333
334#define SSA_VAL(x) (VN_INFO ((x))->valnum)
335
336/* Return the SSA value of the VUSE X, supporting released VDEFs
337 during elimination, which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
339
340static inline tree
341vuse_ssa_val (tree x)
342{
343 if (!x)
344 return NULL_TREE;
345
346 do
347 {
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
354 }
355 while (SSA_NAME_IN_FREE_LIST (x));
356
357 return x;
358}
359
360/* This represents the top of the VN lattice, which is the universal
361 value. */
362
363tree VN_TOP;
364
365/* Unique counter for our value ids. */
366
367static unsigned int next_value_id;
368
369/* Next DFS number and the stack for strongly connected component
370 detection. */
371
372static unsigned int next_dfs_num;
373static vec<tree> sccstack;
374
375
376
377/* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
380
381static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382static struct obstack vn_ssa_aux_obstack;
383
384/* Return whether there is value numbering information for a given SSA name. */
385
386bool
387has_VN_INFO (tree name)
388{
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
392}
393
394/* Return the value numbering information for a given SSA name. */
395
396vn_ssa_aux_t
397VN_INFO (tree name)
398{
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
402}
403
404/* Set the value numbering info for a given SSA name to a given
405 value. */
406
407static inline void
408VN_INFO_SET (tree name, vn_ssa_aux_t value)
409{
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
411}
412
413/* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
415
416vn_ssa_aux_t
417VN_INFO_GET (tree name)
418{
419 vn_ssa_aux_t newinfo;
420
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
429}
430
431
432/* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
434
435enum vn_kind
436vn_get_stmt_kind (gimple *stmt)
437{
438 switch (gimple_code (stmt))
439 {
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
445 {
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
449 {
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
456 {
457 case tcc_reference:
458 /* VOP-less references can go through unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
465
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
469
470 case tcc_constant:
471 return VN_CONSTANT;
472
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
480 }
481 default:
482 return VN_NONE;
483 }
484 }
485 default:
486 return VN_NONE;
487 }
488}
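/* For illustration (hypothetical statements, not taken from this file): an
   assignment like x_1 = a_2 + b_3 is classified VN_NARY, x_1 = *p_4 is
   VN_REFERENCE, a GIMPLE_PHI is VN_PHI, a call is VN_REFERENCE, and
   x_1 = 42 is VN_CONSTANT.  */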
489
490/* Lookup a value id for CONSTANT and return it. If it does not
491 exist, return 0. */
492
493unsigned int
494get_constant_value_id (tree constant)
495{
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
498
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
505}
506
507/* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
509
510unsigned int
511get_or_alloc_constant_value_id (tree constant)
512{
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
516
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
522
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
530}
531
532/* Return true if V is a value id for a constant. */
533
534bool
535value_id_constant_p (unsigned int v)
536{
537 return bitmap_bit_p (constant_value_ids, v);
538}
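/* Usage sketch (illustrative): the first call to
   get_or_alloc_constant_value_id (build_int_cst (integer_type_node, 42))
   allocates a fresh value id and records it in constant_value_ids;
   subsequent calls with an equal constant return the same id, and
   value_id_constant_p answers true for it.  */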
539
540/* Compute the hash for a reference operand VRO1. */
541
542static void
543vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
544{
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
552}
553
554/* Compute a hash for the reference operation VR1 and return it. */
555
556static hashval_t
557vn_reference_compute_hash (const vn_reference_t vr1)
558{
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 HOST_WIDE_INT off = -1;
564 bool deref = false;
565
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
567 {
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (vro->off != -1)
573 {
574 if (off == -1)
575 off = 0;
576 off += vro->off;
577 }
578 else
579 {
580 if (off != -1
581 && off != 0)
582 hstate.add_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
586 {
587 if (vro->op0)
588 {
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
592 }
593 }
594 else
595 vn_reference_op_compute_hash (vro, hstate);
596 }
597 }
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
602
603 return result;
604}
605
606/* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
608
609bool
610vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
611{
612 unsigned i, j;
613
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
617
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
621
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
625
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
628
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
631 {
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
634 }
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
643
644 i = 0;
645 j = 0;
646 do
647 {
648 HOST_WIDE_INT off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
653 {
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (vro1->off == -1)
660 break;
661 off1 += vro1->off;
662 }
663 for (; vr2->operands.iterate (j, &vro2); j++)
664 {
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (vro2->off == -1)
671 break;
672 off2 += vro2->off;
673 }
674 if (off1 != off2)
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
677 {
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
684 }
685 if (deref2 && vro2->opcode == ADDR_EXPR)
686 {
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
693 }
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
700 }
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
703
704 return true;
705}
706
707/* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
709
710static void
711copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
712{
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
714 {
715 vn_reference_op_s temp;
716
717 result->reserve (3);
718
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
729
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
736
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
744 }
745
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
749 {
750 vn_reference_op_s temp;
751
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
756
757 switch (temp.opcode)
758 {
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 {
770 offset_int off = mem_ref_offset (ref);
771 if (wi::fits_shwi_p (off))
772 temp.off = off.to_shwi ();
773 }
774 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
775 temp.base = MR_DEPENDENCE_BASE (ref);
776 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
777 break;
778 case BIT_FIELD_REF:
779 /* Record bits, position and storage order. */
780 temp.op0 = TREE_OPERAND (ref, 1);
781 temp.op1 = TREE_OPERAND (ref, 2);
782 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
783 {
784 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
785 if (off % BITS_PER_UNIT == 0)
786 temp.off = off / BITS_PER_UNIT;
787 }
788 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
789 break;
790 case COMPONENT_REF:
791 /* The field decl is enough to unambiguously specify the field;
792 a matching type is not necessary and a mismatching type
793 is always a spurious difference. */
794 temp.type = NULL_TREE;
795 temp.op0 = TREE_OPERAND (ref, 1);
796 temp.op1 = TREE_OPERAND (ref, 2);
797 {
798 tree this_offset = component_ref_field_offset (ref);
799 if (this_offset
800 && TREE_CODE (this_offset) == INTEGER_CST)
801 {
802 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
803 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
804 {
805 offset_int off
806 = (wi::to_offset (this_offset)
807 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
808 if (wi::fits_shwi_p (off)
809 /* Prohibit value-numbering zero offset components
810 of addresses the same before the pass folding
811 __builtin_object_size had a chance to run
812 (checking cfun->after_inlining does the
813 trick here). */
814 && (TREE_CODE (orig) != ADDR_EXPR
815 || off != 0
816 || cfun->after_inlining))
817 temp.off = off.to_shwi ();
818 }
819 }
820 }
821 break;
822 case ARRAY_RANGE_REF:
823 case ARRAY_REF:
824 {
825 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
826 /* Record index as operand. */
827 temp.op0 = TREE_OPERAND (ref, 1);
828 /* Always record lower bounds and element size. */
829 temp.op1 = array_ref_low_bound (ref);
830 /* But record element size in units of the type alignment. */
831 temp.op2 = TREE_OPERAND (ref, 3);
832 temp.align = eltype->type_common.align;
833 if (! temp.op2)
834 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
835 size_int (TYPE_ALIGN_UNIT (eltype)));
836 if (TREE_CODE (temp.op0) == INTEGER_CST
837 && TREE_CODE (temp.op1) == INTEGER_CST
838 && TREE_CODE (temp.op2) == INTEGER_CST)
839 {
840 offset_int off = ((wi::to_offset (temp.op0)
841 - wi::to_offset (temp.op1))
842 * wi::to_offset (temp.op2)
843 * vn_ref_op_align_unit (&temp));
844 if (wi::fits_shwi_p (off))
845 temp.off = off.to_shwi();
846 }
847 }
848 break;
849 case VAR_DECL:
850 if (DECL_HARD_REGISTER (ref))
851 {
852 temp.op0 = ref;
853 break;
854 }
855 /* Fallthru. */
856 case PARM_DECL:
857 case CONST_DECL:
858 case RESULT_DECL:
859 /* Canonicalize decls to MEM[&decl] which is what we end up with
860 when valueizing MEM[ptr] with ptr = &decl. */
861 temp.opcode = MEM_REF;
862 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
863 temp.off = 0;
864 result->safe_push (temp);
865 temp.opcode = ADDR_EXPR;
866 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
867 temp.type = TREE_TYPE (temp.op0);
868 temp.off = -1;
869 break;
870 case STRING_CST:
871 case INTEGER_CST:
872 case COMPLEX_CST:
873 case VECTOR_CST:
874 case REAL_CST:
875 case FIXED_CST:
876 case CONSTRUCTOR:
877 case SSA_NAME:
878 temp.op0 = ref;
879 break;
880 case ADDR_EXPR:
881 if (is_gimple_min_invariant (ref))
882 {
883 temp.op0 = ref;
884 break;
885 }
886 break;
887 /* These are only interesting for their operands, their
888 existence, and their type. They will never be the last
889 ref in the chain of references (i.e. they require an
890 operand), so we don't have to put anything
891 for op* as it will be handled by the iteration. */
892 case REALPART_EXPR:
893 temp.off = 0;
894 break;
895 case VIEW_CONVERT_EXPR:
896 temp.off = 0;
897 temp.reverse = storage_order_barrier_p (ref);
898 break;
899 case IMAGPART_EXPR:
900 /* This is only interesting for its constant offset. */
901 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
902 break;
903 default:
904 gcc_unreachable ();
905 }
906 result->safe_push (temp);
907
908 if (REFERENCE_CLASS_P (ref)
909 || TREE_CODE (ref) == MODIFY_EXPR
910 || TREE_CODE (ref) == WITH_SIZE_EXPR
911 || (TREE_CODE (ref) == ADDR_EXPR
912 && !is_gimple_min_invariant (ref)))
913 ref = TREE_OPERAND (ref, 0);
914 else
915 ref = NULL_TREE;
916 }
917}
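/* For illustration only: a reference like (*p).a[i] is decomposed by the
   loop above, outermost operation first, into roughly

     { opcode = ARRAY_REF,     op0 = i, op1 = lower bound, op2 = element size }
     { opcode = COMPONENT_REF, op0 = FIELD_DECL for a }
     { opcode = MEM_REF,       op0 = offset 0 }
     { opcode = SSA_NAME,      op0 = p }

   with .off filled in whenever the offset contributed by an operation is
   a known constant.  */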
918
919/* Build an alias-oracle reference abstraction in *REF from the vn_reference
920 operands in OPS, the reference alias set SET and the reference type TYPE.
921 Return true if something useful was produced. */
922
923bool
924ao_ref_init_from_vn_reference (ao_ref *ref,
925 alias_set_type set, tree type,
926 vec<vn_reference_op_s> ops)
927{
928 vn_reference_op_t op;
929 unsigned i;
930 tree base = NULL_TREE;
931 tree *op0_p = &base;
932 offset_int offset = 0;
933 offset_int max_size;
934 offset_int size = -1;
935 tree size_tree = NULL_TREE;
936 alias_set_type base_alias_set = -1;
937
938 /* First get the final access size from just the outermost expression. */
939 op = &ops[0];
940 if (op->opcode == COMPONENT_REF)
941 size_tree = DECL_SIZE (op->op0);
942 else if (op->opcode == BIT_FIELD_REF)
943 size_tree = op->op0;
944 else
945 {
946 machine_mode mode = TYPE_MODE (type);
947 if (mode == BLKmode)
948 size_tree = TYPE_SIZE (type);
949 else
950 size = int (GET_MODE_BITSIZE (mode));
951 }
952 if (size_tree != NULL_TREE
953 && TREE_CODE (size_tree) == INTEGER_CST)
954 size = wi::to_offset (size_tree);
955
956 /* Initially, maxsize is the same as the accessed element size.
957 In the following it will only grow (or become -1). */
958 max_size = size;
959
960 /* Compute cumulative bit-offset for nested component-refs and array-refs,
961 and find the ultimate containing object. */
962 FOR_EACH_VEC_ELT (ops, i, op)
963 {
964 switch (op->opcode)
965 {
966 /* These may be in the reference ops, but we cannot do anything
967 sensible with them here. */
968 case ADDR_EXPR:
969 /* Apart from ADDR_EXPR arguments to MEM_REF. */
970 if (base != NULL_TREE
971 && TREE_CODE (base) == MEM_REF
972 && op->op0
973 && DECL_P (TREE_OPERAND (op->op0, 0)))
974 {
975 vn_reference_op_t pop = &ops[i-1];
976 base = TREE_OPERAND (op->op0, 0);
977 if (pop->off == -1)
978 {
979 max_size = -1;
980 offset = 0;
981 }
982 else
983 offset += pop->off * BITS_PER_UNIT;
984 op0_p = NULL;
985 break;
986 }
987 /* Fallthru. */
988 case CALL_EXPR:
989 return false;
990
991 /* Record the base objects. */
992 case MEM_REF:
993 base_alias_set = get_deref_alias_set (op->op0);
994 *op0_p = build2 (MEM_REF, op->type,
995 NULL_TREE, op->op0);
996 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
997 MR_DEPENDENCE_BASE (*op0_p) = op->base;
998 op0_p = &TREE_OPERAND (*op0_p, 0);
999 break;
1000
1001 case VAR_DECL:
1002 case PARM_DECL:
1003 case RESULT_DECL:
1004 case SSA_NAME:
1005 *op0_p = op->op0;
1006 op0_p = NULL;
1007 break;
1008
1009 /* And now the usual component-reference style ops. */
1010 case BIT_FIELD_REF:
1011 offset += wi::to_offset (op->op1);
1012 break;
1013
1014 case COMPONENT_REF:
1015 {
1016 tree field = op->op0;
1017 /* We do not have a complete COMPONENT_REF tree here so we
1018 cannot use component_ref_field_offset. Do the interesting
1019 parts manually. */
1020 tree this_offset = DECL_FIELD_OFFSET (field);
1021
1022 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1023 max_size = -1;
1024 else
1025 {
1026 offset_int woffset = (wi::to_offset (this_offset)
1027 << LOG2_BITS_PER_UNIT);
1028 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1029 offset += woffset;
1030 }
1031 break;
1032 }
1033
1034 case ARRAY_RANGE_REF:
1035 case ARRAY_REF:
1036 /* We recorded the lower bound and the element size. */
1037 if (TREE_CODE (op->op0) != INTEGER_CST
1038 || TREE_CODE (op->op1) != INTEGER_CST
1039 || TREE_CODE (op->op2) != INTEGER_CST)
1040 max_size = -1;
1041 else
1042 {
1043 offset_int woffset
1044 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1045 TYPE_PRECISION (TREE_TYPE (op->op0)));
1046 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1047 woffset <<= LOG2_BITS_PER_UNIT;
1048 offset += woffset;
1049 }
1050 break;
1051
1052 case REALPART_EXPR:
1053 break;
1054
1055 case IMAGPART_EXPR:
1056 offset += size;
1057 break;
1058
1059 case VIEW_CONVERT_EXPR:
1060 break;
1061
1062 case STRING_CST:
1063 case INTEGER_CST:
1064 case COMPLEX_CST:
1065 case VECTOR_CST:
1066 case REAL_CST:
1067 case CONSTRUCTOR:
1068 case CONST_DECL:
1069 return false;
1070
1071 default:
1072 return false;
1073 }
1074 }
1075
1076 if (base == NULL_TREE)
1077 return false;
1078
1079 ref->ref = NULL_TREE;
1080 ref->base = base;
1081 ref->ref_alias_set = set;
1082 if (base_alias_set != -1)
1083 ref->base_alias_set = base_alias_set;
1084 else
1085 ref->base_alias_set = get_alias_set (base);
1086 /* We discount volatiles from value-numbering elsewhere. */
1087 ref->volatile_p = false;
1088
1089 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1090 {
1091 ref->offset = 0;
1092 ref->size = -1;
1093 ref->max_size = -1;
1094 return true;
1095 }
1096
1097 ref->size = size.to_shwi ();
1098
1099 if (!wi::fits_shwi_p (offset))
1100 {
1101 ref->offset = 0;
1102 ref->max_size = -1;
1103 return true;
1104 }
1105
1106 ref->offset = offset.to_shwi ();
1107
1108 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1109 ref->max_size = -1;
1110 else
1111 ref->max_size = max_size.to_shwi ();
1112
1113 return true;
1114}
1115
1116/* Copy the operations that make up the call statement CALL into RESULT,
1117 a vector of vn_reference_op_s's. */
1118
1119static void
1120copy_reference_ops_from_call (gcall *call,
1121 vec<vn_reference_op_s> *result)
1122{
1123 vn_reference_op_s temp;
1124 unsigned i;
1125 tree lhs = gimple_call_lhs (call);
1126 int lr;
1127
1128 /* If two calls have a different non-SSA lhs, vdef value numbers should be
1129 different. By adding the lhs to the vector here, we ensure that the
1130 hashcode is different, guaranteeing a different value number. */
1131 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1132 {
1133 memset (&temp, 0, sizeof (temp));
1134 temp.opcode = MODIFY_EXPR;
1135 temp.type = TREE_TYPE (lhs);
1136 temp.op0 = lhs;
1137 temp.off = -1;
1138 result->safe_push (temp);
1139 }
1140
1141 /* Copy the type, opcode, function, static chain and EH region, if any. */
1142 memset (&temp, 0, sizeof (temp));
1143 temp.type = gimple_call_return_type (call);
1144 temp.opcode = CALL_EXPR;
1145 temp.op0 = gimple_call_fn (call);
1146 temp.op1 = gimple_call_chain (call);
1147 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1148 temp.op2 = size_int (lr);
1149 temp.off = -1;
1150 if (gimple_call_with_bounds_p (call))
1151 temp.with_bounds = 1;
1152 result->safe_push (temp);
1153
1154 /* Copy the call arguments. As they can be references as well,
1155 just chain them together. */
1156 for (i = 0; i < gimple_call_num_args (call); ++i)
1157 {
1158 tree callarg = gimple_call_arg (call, i);
1159 copy_reference_ops_from_ref (callarg, result);
1160 }
1161}
1162
1163/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1164 *I_P to point to the last element of the replacement. */
1165static bool
1166vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1167 unsigned int *i_p)
1168{
1169 unsigned int i = *i_p;
1170 vn_reference_op_t op = &(*ops)[i];
1171 vn_reference_op_t mem_op = &(*ops)[i - 1];
1172 tree addr_base;
1173 HOST_WIDE_INT addr_offset = 0;
1174
1175 /* All we have to do for &OBJ.foo.bar is add the offset of .foo.bar
1176 to the preceding MEM_REF offset and replace the address
1177 with &OBJ. */
1178 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1179 &addr_offset);
1180 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1181 if (addr_base != TREE_OPERAND (op->op0, 0))
1182 {
1183 offset_int off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1184 off += addr_offset;
1185 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1186 op->op0 = build_fold_addr_expr (addr_base);
1187 if (tree_fits_shwi_p (mem_op->op0))
1188 mem_op->off = tree_to_shwi (mem_op->op0);
1189 else
1190 mem_op->off = -1;
1191 return true;
1192 }
1193 return false;
1194}
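/* Worked example (illustrative): given the operand pair
   ... MEM_REF [offset 8], ADDR_EXPR &s.f ... with field f at byte offset 4
   in s, the function above rewrites the pair to
   ... MEM_REF [offset 12], ADDR_EXPR &s ..., i.e. the component offset is
   folded into the MEM_REF and only the base object is kept in the
   address.  */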
1195
1196/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1197 *I_P to point to the last element of the replacement. */
1198static bool
1199vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1200 unsigned int *i_p)
1201{
1202 unsigned int i = *i_p;
1203 vn_reference_op_t op = &(*ops)[i];
1204 vn_reference_op_t mem_op = &(*ops)[i - 1];
1205 gimple *def_stmt;
1206 enum tree_code code;
1207 offset_int off;
1208
1209 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1210 if (!is_gimple_assign (def_stmt))
1211 return false;
1212
1213 code = gimple_assign_rhs_code (def_stmt);
1214 if (code != ADDR_EXPR
1215 && code != POINTER_PLUS_EXPR)
1216 return false;
1217
1218 off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1219
1220 /* All we have to do for &OBJ.foo.bar is add the offset of .foo.bar
1221 to the preceding MEM_REF offset and replace the address
1222 with &OBJ. */
1223 if (code == ADDR_EXPR)
1224 {
1225 tree addr, addr_base;
1226 HOST_WIDE_INT addr_offset;
1227
1228 addr = gimple_assign_rhs1 (def_stmt);
1229 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1230 &addr_offset);
1231 /* If that didn't work because the address isn't invariant, propagate
1232 the reference tree from the address operation in case the current
1233 dereference isn't offsetted. */
1234 if (!addr_base
1235 && *i_p == ops->length () - 1
1236 && off == 0
1237 /* This makes us disable this transform for PRE, where the
1238 reference ops might also be used for code insertion, which
1239 is invalid. */
1240 && default_vn_walk_kind == VN_WALKREWRITE)
1241 {
1242 auto_vec<vn_reference_op_s, 32> tem;
1243 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1244 /* Make sure to preserve TBAA info. The only objects not
1245 wrapped in MEM_REFs that can have their address taken are
1246 STRING_CSTs. */
1247 if (tem.length () >= 2
1248 && tem[tem.length () - 2].opcode == MEM_REF)
1249 {
1250 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1251 new_mem_op->op0
1252 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1253 wi::to_wide (new_mem_op->op0));
1254 }
1255 else
1256 gcc_assert (tem.last ().opcode == STRING_CST);
1257 ops->pop ();
1258 ops->pop ();
1259 ops->safe_splice (tem);
1260 --*i_p;
1261 return true;
1262 }
1263 if (!addr_base
1264 || TREE_CODE (addr_base) != MEM_REF)
1265 return false;
1266
1267 off += addr_offset;
1268 off += mem_ref_offset (addr_base);
1269 op->op0 = TREE_OPERAND (addr_base, 0);
1270 }
1271 else
1272 {
1273 tree ptr, ptroff;
1274 ptr = gimple_assign_rhs1 (def_stmt);
1275 ptroff = gimple_assign_rhs2 (def_stmt);
1276 if (TREE_CODE (ptr) != SSA_NAME
1277 || TREE_CODE (ptroff) != INTEGER_CST)
1278 return false;
1279
1280 off += wi::to_offset (ptroff);
1281 op->op0 = ptr;
1282 }
1283
1284 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1285 if (tree_fits_shwi_p (mem_op->op0))
1286 mem_op->off = tree_to_shwi (mem_op->op0);
1287 else
1288 mem_op->off = -1;
1289 if (TREE_CODE (op->op0) == SSA_NAME)
1290 op->op0 = SSA_VAL (op->op0);
1291 if (TREE_CODE (op->op0) != SSA_NAME)
1292 op->opcode = TREE_CODE (op->op0);
1293
1294 /* And recurse. */
1295 if (TREE_CODE (op->op0) == SSA_NAME)
1296 vn_reference_maybe_forwprop_address (ops, i_p);
1297 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1298 vn_reference_fold_indirect (ops, i_p);
1299 return true;
1300}
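/* Worked example (illustrative): if the address SSA name is defined by
   p_1 = q_2 + 4 (a POINTER_PLUS_EXPR with constant offset), the pair
   ... MEM_REF [offset 8], SSA_NAME p_1 ... becomes
   ... MEM_REF [offset 12], SSA_NAME q_2 ... (with q_2 replaced by its
   value number), and the function then recurses on the new address.  */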
1301
1302/* Optimize the reference REF to a constant if possible or return
1303 NULL_TREE if not. */
1304
1305tree
1306fully_constant_vn_reference_p (vn_reference_t ref)
1307{
1308 vec<vn_reference_op_s> operands = ref->operands;
1309 vn_reference_op_t op;
1310
1311 /* Try to simplify the translated expression if it is
1312 a call to a builtin function with at most two arguments. */
1313 op = &operands[0];
1314 if (op->opcode == CALL_EXPR
1315 && TREE_CODE (op->op0) == ADDR_EXPR
1316 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1317 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1318 && operands.length () >= 2
1319 && operands.length () <= 3)
1320 {
1321 vn_reference_op_t arg0, arg1 = NULL;
1322 bool anyconst = false;
1323 arg0 = &operands[1];
1324 if (operands.length () > 2)
1325 arg1 = &operands[2];
1326 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1327 || (arg0->opcode == ADDR_EXPR
1328 && is_gimple_min_invariant (arg0->op0)))
1329 anyconst = true;
1330 if (arg1
1331 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1332 || (arg1->opcode == ADDR_EXPR
1333 && is_gimple_min_invariant (arg1->op0))))
1334 anyconst = true;
1335 if (anyconst)
1336 {
1337 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1338 arg1 ? 2 : 1,
1339 arg0->op0,
1340 arg1 ? arg1->op0 : NULL);
1341 if (folded
1342 && TREE_CODE (folded) == NOP_EXPR)
1343 folded = TREE_OPERAND (folded, 0);
1344 if (folded
1345 && is_gimple_min_invariant (folded))
1346 return folded;
1347 }
1348 }
1349
1350 /* Simplify reads from constants or constant initializers. */
1351 else if (BITS_PER_UNIT == 8
1352 && is_gimple_reg_type (ref->type)
1353 && (!INTEGRAL_TYPE_P (ref->type)
1354 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1355 {
1356 HOST_WIDE_INT off = 0;
1357 HOST_WIDE_INT size;
1358 if (INTEGRAL_TYPE_P (ref->type))
1359 size = TYPE_PRECISION (ref->type);
1360 else
1361 size = tree_to_shwi (TYPE_SIZE (ref->type));
1362 if (size % BITS_PER_UNIT != 0
1363 || size > MAX_BITSIZE_MODE_ANY_MODE)
1364 return NULL_TREE;
1365 size /= BITS_PER_UNIT;
1366 unsigned i;
1367 for (i = 0; i < operands.length (); ++i)
1368 {
1369 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1370 {
1371 ++i;
1372 break;
1373 }
1374 if (operands[i].off == -1)
1375 return NULL_TREE;
1376 off += operands[i].off;
1377 if (operands[i].opcode == MEM_REF)
1378 {
1379 ++i;
1380 break;
1381 }
1382 }
1383 vn_reference_op_t base = &operands[--i];
1384 tree ctor = error_mark_node;
1385 tree decl = NULL_TREE;
1386 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1387 ctor = base->op0;
1388 else if (base->opcode == MEM_REF
1389 && base[1].opcode == ADDR_EXPR
1390 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1391 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1392 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1393 {
1394 decl = TREE_OPERAND (base[1].op0, 0);
1395 if (TREE_CODE (decl) == STRING_CST)
1396 ctor = decl;
1397 else
1398 ctor = ctor_for_folding (decl);
1399 }
1400 if (ctor == NULL_TREE)
1401 return build_zero_cst (ref->type);
1402 else if (ctor != error_mark_node)
1403 {
1404 if (decl)
1405 {
1406 tree res = fold_ctor_reference (ref->type, ctor,
1407 off * BITS_PER_UNIT,
1408 size * BITS_PER_UNIT, decl);
1409 if (res)
1410 {
1411 STRIP_USELESS_TYPE_CONVERSION (res);
1412 if (is_gimple_min_invariant (res))
1413 return res;
1414 }
1415 }
1416 else
1417 {
1418 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1419 int len = native_encode_expr (ctor, buf, size, off);
1420 if (len > 0)
1421 return native_interpret_expr (ref->type, buf, len);
1422 }
1423 }
1424 }
1425
1426 return NULL_TREE;
1427}
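/* As an illustrative example, a load such as

     static const int tab[2] = { 1, 2 };
     ... = tab[1];

   ends up with reference ops whose base is a MEM_REF of &tab; ctor_for_folding
   then yields tab's initializer and the code above folds the read to the
   constant 2 via fold_ctor_reference.  */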
1428
1429/* Return true if OPS contain a storage order barrier. */
1430
1431static bool
1432contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1433{
1434 vn_reference_op_t op;
1435 unsigned i;
1436
1437 FOR_EACH_VEC_ELT (ops, i, op)
1438 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1439 return true;
1440
1441 return false;
1442}
1443
1444/* Transform any SSA_NAME's in a vector of vn_reference_op_s
1445 structures into their value numbers. This is done in-place, and
1446 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1447 whether any operands were valueized. */
1448
1449static vec<vn_reference_op_s>
1450valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1451{
1452 vn_reference_op_t vro;
1453 unsigned int i;
1454
1455 *valueized_anything = false;
1456
1457 FOR_EACH_VEC_ELT (orig, i, vro)
1458 {
1459 if (vro->opcode == SSA_NAME
1460 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1461 {
1462 tree tem = SSA_VAL (vro->op0);
1463 if (tem != vro->op0)
1464 {
1465 *valueized_anything = true;
1466 vro->op0 = tem;
1467 }
1468 /* If it transforms from an SSA_NAME to a constant, update
1469 the opcode. */
1470 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1471 vro->opcode = TREE_CODE (vro->op0);
1472 }
1473 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1474 {
1475 tree tem = SSA_VAL (vro->op1);
1476 if (tem != vro->op1)
1477 {
1478 *valueized_anything = true;
1479 vro->op1 = tem;
1480 }
1481 }
1482 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1483 {
1484 tree tem = SSA_VAL (vro->op2);
1485 if (tem != vro->op2)
1486 {
1487 *valueized_anything = true;
1488 vro->op2 = tem;
1489 }
1490 }
1491 /* If it transforms from an SSA_NAME to an address, fold with
1492 a preceding indirect reference. */
1493 if (i > 0
1494 && vro->op0
1495 && TREE_CODE (vro->op0) == ADDR_EXPR
1496 && orig[i - 1].opcode == MEM_REF)
1497 {
1498 if (vn_reference_fold_indirect (&orig, &i))
1499 *valueized_anything = true;
1500 }
1501 else if (i > 0
1502 && vro->opcode == SSA_NAME
1503 && orig[i - 1].opcode == MEM_REF)
1504 {
1505 if (vn_reference_maybe_forwprop_address (&orig, &i))
1506 *valueized_anything = true;
1507 }
1508 /* If it transforms a non-constant ARRAY_REF into a constant
1509 one, adjust the constant offset. */
1510 else if (vro->opcode == ARRAY_REF
1511 && vro->off == -1
1512 && TREE_CODE (vro->op0) == INTEGER_CST
1513 && TREE_CODE (vro->op1) == INTEGER_CST
1514 && TREE_CODE (vro->op2) == INTEGER_CST)
1515 {
1516 offset_int off = ((wi::to_offset (vro->op0)
1517 - wi::to_offset (vro->op1))
1518 * wi::to_offset (vro->op2)
1519 * vn_ref_op_align_unit (vro));
1520 if (wi::fits_shwi_p (off))
1521 vro->off = off.to_shwi ();
1522 }
1523 }
1524
1525 return orig;
1526}
1527
1528static vec<vn_reference_op_s>
1529valueize_refs (vec<vn_reference_op_s> orig)
1530{
1531 bool tem;
1532 return valueize_refs_1 (orig, &tem);
1533}
1534
1535static vec<vn_reference_op_s> shared_lookup_references;
1536
1537/* Create a vector of vn_reference_op_s structures from REF, a
1538 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1539 this function. *VALUEIZED_ANYTHING will specify whether any
1540 operands were valueized. */
1541
1542static vec<vn_reference_op_s>
1543valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1544{
1545 if (!ref)
1546 return vNULL;
1547 shared_lookup_references.truncate (0);
1548 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1549 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1550 valueized_anything);
1551 return shared_lookup_references;
1552}
1553
1554/* Create a vector of vn_reference_op_s structures from CALL, a
1555 call statement. The vector is shared among all callers of
1556 this function. */
1557
1558static vec<vn_reference_op_s>
1559valueize_shared_reference_ops_from_call (gcall *call)
1560{
1561 if (!call)
1562 return vNULL;
1563 shared_lookup_references.truncate (0);
1564 copy_reference_ops_from_call (call, &shared_lookup_references);
1565 shared_lookup_references = valueize_refs (shared_lookup_references);
1566 return shared_lookup_references;
1567}
1568
1569/* Lookup a SCCVN reference operation VR in the current hash table.
1570 Returns the resulting value number if it exists in the hash table,
1571 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1572 vn_reference_t stored in the hashtable if something is found. */
1573
1574static tree
1575vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1576{
1577 vn_reference_s **slot;
1578 hashval_t hash;
1579
1580 hash = vr->hashcode;
1581 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1582 if (!slot && current_info == optimistic_info)
1583 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1584 if (slot)
1585 {
1586 if (vnresult)
1587 *vnresult = (vn_reference_t)*slot;
1588 return ((vn_reference_t)*slot)->result;
1589 }
1590
1591 return NULL_TREE;
1592}
1593
1594/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1595 with the current VUSE and performs the expression lookup. */
1596
1597static void *
1598vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1599 unsigned int cnt, void *vr_)
1600{
1601 vn_reference_t vr = (vn_reference_t)vr_;
1602 vn_reference_s **slot;
1603 hashval_t hash;
1604
1605 /* This bounds the stmt walks we perform on reference lookups
1606 to O(1) instead of O(N) where N is the number of dominating
1607 stores. */
1608 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1609 return (void *)-1;
1610
1611 if (last_vuse_ptr)
1612 *last_vuse_ptr = vuse;
1613
1614 /* Fixup vuse and hash. */
1615 if (vr->vuse)
1616 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1617 vr->vuse = vuse_ssa_val (vuse);
1618 if (vr->vuse)
1619 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1620
1621 hash = vr->hashcode;
1622 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1623 if (!slot && current_info == optimistic_info)
1624 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1625 if (slot)
1626 return *slot;
1627
1628 return NULL;
1629}
1630
1631/* Lookup an existing or insert a new vn_reference entry into the
1632 value table for the VUSE, SET, TYPE, OPERANDS reference which
1633 has the value VALUE, either a constant or an SSA name. */
1634
1635static vn_reference_t
1636vn_reference_lookup_or_insert_for_pieces (tree vuse,
1637 alias_set_type set,
1638 tree type,
1639 vec<vn_reference_op_s,
1640 va_heap> operands,
1641 tree value)
1642{
1643 vn_reference_s vr1;
1644 vn_reference_t result;
1645 unsigned value_id;
1646 vr1.vuse = vuse;
1647 vr1.operands = operands;
1648 vr1.type = type;
1649 vr1.set = set;
1650 vr1.hashcode = vn_reference_compute_hash (&vr1);
1651 if (vn_reference_lookup_1 (&vr1, &result))
1652 return result;
1653 if (TREE_CODE (value) == SSA_NAME)
1654 value_id = VN_INFO (value)->value_id;
1655 else
1656 value_id = get_or_alloc_constant_value_id (value);
1657 return vn_reference_insert_pieces (vuse, set, type,
1658 operands.copy (), value, value_id);
1659}
1660
1661static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1662static unsigned mprts_hook_cnt;
1663
1664/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1665
1666static tree
1667vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1668{
1669 if (!rcode.is_tree_code ())
1670 return NULL_TREE;
1671 tree *ops = ops_;
1672 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1673 if (rcode == CONSTRUCTOR
1674 /* ??? SCCVN's view has the CONSTRUCTOR decomposed into its elements,
1675 while GIMPLE / match-and-simplify hand us the CONSTRUCTOR as a GENERIC tree. */
1676 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1677 {
1678 length = CONSTRUCTOR_NELTS (ops_[0]);
1679 ops = XALLOCAVEC (tree, length);
1680 for (unsigned i = 0; i < length; ++i)
1681 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1682 }
1683 vn_nary_op_t vnresult = NULL;
1684 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1685 type, ops, &vnresult);
1686 /* We can end up endlessly recursing simplifications if the lookup above
1687 presents us with a def-use chain that mirrors the original simplification.
1688 See PR80887 for an example. Limit successful lookup artificially
1689 to 10 times if we are called as mprts_hook. */
1690 if (res
1691 && mprts_hook
1692 && --mprts_hook_cnt == 0)
1693 {
1694 if (dump_file && (dump_flags & TDF_DETAILS))
1695 fprintf (dump_file, "Resetting mprts_hook after too many "
1696 "invocations.\n");
1697 mprts_hook = NULL;
1698 }
1699 return res;
1700}
1701
1702/* Return a value-number for RCODE OPS... either by looking up an existing
1703 value-number for the simplified result or by inserting the operation if
1704 INSERT is true. */
1705
1706static tree
1707vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1708 bool insert)
1709{
1710 tree result = NULL_TREE;
1711 /* We will be creating a value number for
1712 RCODE (OPS...).
1713 So first simplify and lookup this expression to see if it
1714 is already available. */
1715 mprts_hook = vn_lookup_simplify_result;
1716 mprts_hook_cnt = 9;
1717 bool res = false;
1718 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1719 {
1720 case 1:
1721 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1722 break;
1723 case 2:
1724 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1725 break;
1726 case 3:
1727 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1728 break;
1729 }
1730 mprts_hook = NULL;
1731 gimple *new_stmt = NULL;
1732 if (res
1733 && gimple_simplified_result_is_gimple_val (rcode, ops))
1734 /* The expression is already available. */
1735 result = ops[0];
1736 else
1737 {
1738 tree val = vn_lookup_simplify_result (rcode, type, ops);
1739 if (!val && insert)
1740 {
1741 gimple_seq stmts = NULL;
1742 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1743 if (result)
1744 {
1745 gcc_assert (gimple_seq_singleton_p (stmts));
1746 new_stmt = gimple_seq_first_stmt (stmts);
1747 }
1748 }
1749 else
1750 /* The expression is already available. */
1751 result = val;
1752 }
1753 if (new_stmt)
1754 {
1755 /* The expression is not yet available, value-number lhs to
1756 the new SSA_NAME we created. */
1757 /* Initialize value-number information properly. */
1758 VN_INFO_GET (result)->valnum = result;
1759 VN_INFO (result)->value_id = get_next_value_id ();
1760 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1761 new_stmt);
1762 VN_INFO (result)->needs_insertion = true;
1763 /* ??? PRE phi-translation inserts NARYs without corresponding
1764 SSA name result. Re-use those but set their result according
1765 to the stmt we just built. */
1766 vn_nary_op_t nary = NULL;
1767 vn_nary_op_lookup_stmt (new_stmt, &nary);
1768 if (nary)
1769 {
1770 gcc_assert (nary->result == NULL_TREE);
1771 nary->result = gimple_assign_lhs (new_stmt);
1772 }
1773 /* As all "inserted" statements are singleton SCCs, insert
1774 to the valid table. This is strictly needed to
1775 avoid re-generating new value SSA_NAMEs for the same
1776 expression during SCC iteration over and over (the
1777 optimistic table gets cleared after each iteration).
1778 We do not need to insert into the optimistic table, as
1779 lookups there will fall back to the valid table. */
1780 else if (current_info == optimistic_info)
1781 {
1782 current_info = valid_info;
1783 vn_nary_op_insert_stmt (new_stmt, result);
1784 current_info = optimistic_info;
1785 }
1786 else
1787 vn_nary_op_insert_stmt (new_stmt, result);
1788 if (dump_file && (dump_flags & TDF_DETAILS))
1789 {
1790 fprintf (dump_file, "Inserting name ");
1791 print_generic_expr (dump_file, result);
1792 fprintf (dump_file, " for expression ");
1793 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1794 fprintf (dump_file, "\n");
1795 }
1796 }
1797 return result;
1798}
1799
1800/* Return a value-number for RCODE OPS... either by looking up an existing
1801 value-number for the simplified result or by inserting the operation. */
1802
1803static tree
1804vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1805{
1806 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1807}
1808
1809/* Try to simplify the nary operation NARY and return its simplified
1810 value if one is available. */
1811
1812tree
1813vn_nary_simplify (vn_nary_op_t nary)
1814{
1815 if (nary->length > 3)
1816 return NULL_TREE;
1817 tree ops[3];
1818 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1819 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1820}
1821
1822
1823/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1824 from the statement defining VUSE, and if not successful, tries to
1825 translate *REF and VR_ through an aggregate copy at the definition
1826 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1827 of *REF and *VR. If only disambiguation was performed then
1828 *DISAMBIGUATE_ONLY is set to true. */
1829
1830static void *
1831vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1832 bool *disambiguate_only)
1833{
1834 vn_reference_t vr = (vn_reference_t)vr_;
1835 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1836 tree base = ao_ref_base (ref);
1837 HOST_WIDE_INT offset, maxsize;
1838 static vec<vn_reference_op_s> lhs_ops;
1839 ao_ref lhs_ref;
1840 bool lhs_ref_ok = false;
1841
1842 /* If the reference is based on a parameter that was determined to be
1843 pointing to readonly memory, it doesn't change. */
1844 if (TREE_CODE (base) == MEM_REF
1845 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1846 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1847 && bitmap_bit_p (const_parms,
1848 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1849 {
1850 *disambiguate_only = true;
1851 return NULL;
1852 }
1853
1854 /* First try to disambiguate after value-replacing in the definition's LHS. */
1855 if (is_gimple_assign (def_stmt))
1856 {
1857 tree lhs = gimple_assign_lhs (def_stmt);
1858 bool valueized_anything = false;
1859 /* Avoid re-allocation overhead. */
1860 lhs_ops.truncate (0);
1861 copy_reference_ops_from_ref (lhs, &lhs_ops);
1862 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1863 if (valueized_anything)
1864 {
1865 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1866 get_alias_set (lhs),
1867 TREE_TYPE (lhs), lhs_ops);
1868 if (lhs_ref_ok
1869 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1870 {
1871 *disambiguate_only = true;
1872 return NULL;
1873 }
1874 }
1875 else
1876 {
1877 ao_ref_init (&lhs_ref, lhs);
1878 lhs_ref_ok = true;
1879 }
1880
1881 /* If we reach a clobbering statement try to skip it and see if
1882 we find a VN result with exactly the same value as the
1883 possible clobber. In this case we can ignore the clobber
1884 and return the found value.
1885 Note that we don't need to worry about partial overlapping
1886 accesses as we then can use TBAA to disambiguate against the
1887 clobbering statement when looking up a load (thus the
1888 VN_WALKREWRITE guard). */
1889 if (vn_walk_kind == VN_WALKREWRITE
1890 && is_gimple_reg_type (TREE_TYPE (lhs))
1891 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1892 {
1893 tree *saved_last_vuse_ptr = last_vuse_ptr;
1894 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1895 last_vuse_ptr = NULL;
1896 tree saved_vuse = vr->vuse;
1897 hashval_t saved_hashcode = vr->hashcode;
1898 void *res = vn_reference_lookup_2 (ref,
1899 gimple_vuse (def_stmt), 0, vr);
1900 /* Need to restore vr->vuse and vr->hashcode. */
1901 vr->vuse = saved_vuse;
1902 vr->hashcode = saved_hashcode;
1903 last_vuse_ptr = saved_last_vuse_ptr;
1904 if (res && res != (void *)-1)
1905 {
1906 vn_reference_t vnresult = (vn_reference_t) res;
1907 if (vnresult->result
1908 && operand_equal_p (vnresult->result,
1909 gimple_assign_rhs1 (def_stmt), 0))
1910 return res;
1911 }
1912 }
1913 }
1914 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1915 && gimple_call_num_args (def_stmt) <= 4)
1916 {
      /* For builtin calls valueize their arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034, which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
1923 tree oldargs[4];
1924 bool valueized_anything = false;
1925 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1926 {
1927 oldargs[i] = gimple_call_arg (def_stmt, i);
1928 tree val = vn_valueize (oldargs[i]);
1929 if (val != oldargs[i])
1930 {
1931 gimple_call_set_arg (def_stmt, i, val);
1932 valueized_anything = true;
1933 }
1934 }
1935 if (valueized_anything)
1936 {
1937 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1938 ref);
1939 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1940 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1941 if (!res)
1942 {
1943 *disambiguate_only = true;
1944 return NULL;
1945 }
1946 }
1947 }
1948
1949 if (*disambiguate_only)
1950 return (void *)-1;
1951
1952 offset = ref->offset;
1953 maxsize = ref->max_size;
1954
1955 /* If we cannot constrain the size of the reference we cannot
1956 test if anything kills it. */
1957 if (maxsize == -1)
1958 return (void *)-1;
1959
1960 /* We can't deduce anything useful from clobbers. */
1961 if (gimple_clobber_p (def_stmt))
1962 return (void *)-1;
1963
1964 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1965 from that definition.
1966 1) Memset. */
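  /* A hypothetical sketch of the memset case:
       memset (&a, 0, sizeof (a));
       ...
       ... = a.f;
     When the memset covers the whole access, the load of a.f can be
     valued as zero.  */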
1967 if (is_gimple_reg_type (vr->type)
1968 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1969 && integer_zerop (gimple_call_arg (def_stmt, 1))
1970 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1971 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1972 {
1973 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1974 tree base2;
1975 HOST_WIDE_INT offset2, size2, maxsize2;
1976 bool reverse;
1977 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1978 &reverse);
1979 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1980 if ((unsigned HOST_WIDE_INT)size2 / 8
1981 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1982 && maxsize2 != -1
1983 && operand_equal_p (base, base2, 0)
1984 && offset2 <= offset
1985 && offset2 + size2 >= offset + maxsize)
1986 {
1987 tree val = build_zero_cst (vr->type);
1988 return vn_reference_lookup_or_insert_for_pieces
1989 (vuse, vr->set, vr->type, vr->operands, val);
1990 }
1991 }
1992
1993 /* 2) Assignment from an empty CONSTRUCTOR. */
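  /* E.g. (a hypothetical sketch):
       a = {};
       ...
       ... = a.f;
     A load fully covered by the clearing can likewise be valued as
     zero.  */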
1994 else if (is_gimple_reg_type (vr->type)
1995 && gimple_assign_single_p (def_stmt)
1996 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1997 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1998 {
1999 tree base2;
2000 HOST_WIDE_INT offset2, size2, maxsize2;
2001 bool reverse;
2002 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2003 &offset2, &size2, &maxsize2, &reverse);
2004 if (maxsize2 != -1
2005 && operand_equal_p (base, base2, 0)
2006 && offset2 <= offset
2007 && offset2 + size2 >= offset + maxsize)
2008 {
2009 tree val = build_zero_cst (vr->type);
2010 return vn_reference_lookup_or_insert_for_pieces
2011 (vuse, vr->set, vr->type, vr->operands, val);
2012 }
2013 }
2014
  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
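  /* A hypothetical sketch: for
       a.i = 0x01020304;
       ...
       ... = a.c[2];
     the stored constant is byte-encoded into a buffer and the bytes
     covering the load are re-interpreted in the type of the load.  */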
2017 else if (ref->size == maxsize
2018 && is_gimple_reg_type (vr->type)
2019 && !contains_storage_order_barrier_p (vr->operands)
2020 && gimple_assign_single_p (def_stmt)
2021 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2022 && maxsize % BITS_PER_UNIT == 0
2023 && offset % BITS_PER_UNIT == 0
2024 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2025 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2026 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2027 {
2028 tree base2;
2029 HOST_WIDE_INT offset2, size2, maxsize2;
2030 bool reverse;
2031 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2032 &offset2, &size2, &maxsize2, &reverse);
2033 if (!reverse
2034 && maxsize2 != -1
2035 && maxsize2 == size2
2036 && size2 % BITS_PER_UNIT == 0
2037 && offset2 % BITS_PER_UNIT == 0
2038 && operand_equal_p (base, base2, 0)
2039 && offset2 <= offset
2040 && offset2 + size2 >= offset + maxsize)
2041 {
2042 /* We support up to 512-bit values (for V8DFmode). */
2043 unsigned char buffer[64];
2044 int len;
2045
2046 tree rhs = gimple_assign_rhs1 (def_stmt);
2047 if (TREE_CODE (rhs) == SSA_NAME)
2048 rhs = SSA_VAL (rhs);
	  len = native_encode_expr (rhs, buffer, sizeof (buffer));
2051 if (len > 0)
2052 {
2053 tree type = vr->type;
2054 /* Make sure to interpret in a type that has a range
2055 covering the whole access size. */
2056 if (INTEGRAL_TYPE_P (vr->type)
2057 && ref->size != TYPE_PRECISION (vr->type))
2058 type = build_nonstandard_integer_type (ref->size,
2059 TYPE_UNSIGNED (type));
2060 tree val = native_interpret_expr (type,
2061 buffer
2062 + ((offset - offset2)
2063 / BITS_PER_UNIT),
2064 ref->size / BITS_PER_UNIT);
	      /* If we chop off bits because the type's precision doesn't
		 match the memory access size, this is OK when optimizing
		 reads but not when called from the DSE code during
		 elimination.  */
2069 if (val
2070 && type != vr->type)
2071 {
2072 if (! int_fits_type_p (val, vr->type))
2073 val = NULL_TREE;
2074 else
2075 val = fold_convert (vr->type, val);
2076 }
2077
2078 if (val)
2079 return vn_reference_lookup_or_insert_for_pieces
2080 (vuse, vr->set, vr->type, vr->operands, val);
2081 }
2082 }
2083 }
2084
  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
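  /* A hypothetical sketch of this case:
       a = x_1;
       ...
       ... = a.f;
     The load of a.f is re-expressed as BIT_FIELD_REF <x_1, size, pos>
     and looked up (or built) as an n-ary operation.  */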
2087 else if (ref->size == maxsize
2088 && is_gimple_reg_type (vr->type)
2089 && !contains_storage_order_barrier_p (vr->operands)
2090 && gimple_assign_single_p (def_stmt)
2091 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2092 {
2093 tree base2;
2094 HOST_WIDE_INT offset2, size2, maxsize2;
2095 bool reverse;
2096 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2097 &offset2, &size2, &maxsize2,
2098 &reverse);
2099 if (!reverse
2100 && maxsize2 != -1
2101 && maxsize2 == size2
2102 && operand_equal_p (base, base2, 0)
2103 && offset2 <= offset
2104 && offset2 + size2 >= offset + maxsize
2105 /* ??? We can't handle bitfield precision extracts without
2106 either using an alternate type for the BIT_FIELD_REF and
2107 then doing a conversion or possibly adjusting the offset
2108 according to endianness. */
2109 && (! INTEGRAL_TYPE_P (vr->type)
2110 || ref->size == TYPE_PRECISION (vr->type))
2111 && ref->size % BITS_PER_UNIT == 0)
2112 {
2113 code_helper rcode = BIT_FIELD_REF;
2114 tree ops[3];
2115 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2116 ops[1] = bitsize_int (ref->size);
2117 ops[2] = bitsize_int (offset - offset2);
2118 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2119 if (val
2120 && (TREE_CODE (val) != SSA_NAME
2121 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2122 {
2123 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2124 (vuse, vr->set, vr->type, vr->operands, val);
2125 return res;
2126 }
2127 }
2128 }
2129
2130 /* 5) For aggregate copies translate the reference through them if
2131 the copy kills ref. */
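  /* A hypothetical sketch of this case:
       *p_1 = b;
       ...
       ... = p_1->x;
     The load is rewritten in terms of b (i.e. b.x) and the walk
     continues with the translated reference.  */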
2132 else if (vn_walk_kind == VN_WALKREWRITE
2133 && gimple_assign_single_p (def_stmt)
2134 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2135 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2136 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2137 {
2138 tree base2;
2139 HOST_WIDE_INT maxsize2;
2140 int i, j, k;
2141 auto_vec<vn_reference_op_s> rhs;
2142 vn_reference_op_t vro;
2143 ao_ref r;
2144
2145 if (!lhs_ref_ok)
2146 return (void *)-1;
2147
2148 /* See if the assignment kills REF. */
2149 base2 = ao_ref_base (&lhs_ref);
2150 maxsize2 = lhs_ref.max_size;
2151 if (maxsize2 == -1
2152 || (base != base2
2153 && (TREE_CODE (base) != MEM_REF
2154 || TREE_CODE (base2) != MEM_REF
2155 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2156 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2157 TREE_OPERAND (base2, 1))))
2158 || !stmt_kills_ref_p (def_stmt, ref))
2159 return (void *)-1;
2160
2161 /* Find the common base of ref and the lhs. lhs_ops already
2162 contains valueized operands for the lhs. */
2163 i = vr->operands.length () - 1;
2164 j = lhs_ops.length () - 1;
2165 while (j >= 0 && i >= 0
2166 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2167 {
2168 i--;
2169 j--;
2170 }
2171
2172 /* ??? The innermost op should always be a MEM_REF and we already
2173 checked that the assignment to the lhs kills vr. Thus for
2174 aggregate copies using char[] types the vn_reference_op_eq
2175 may fail when comparing types for compatibility. But we really
2176 don't care here - further lookups with the rewritten operands
2177 will simply fail if we messed up types too badly. */
2178 HOST_WIDE_INT extra_off = 0;
2179 if (j == 0 && i >= 0
2180 && lhs_ops[0].opcode == MEM_REF
2181 && lhs_ops[0].off != -1)
2182 {
2183 if (lhs_ops[0].off == vr->operands[i].off)
2184 i--, j--;
2185 else if (vr->operands[i].opcode == MEM_REF
2186 && vr->operands[i].off != -1)
2187 {
2188 extra_off = vr->operands[i].off - lhs_ops[0].off;
2189 i--, j--;
2190 }
2191 }
2192
2193 /* i now points to the first additional op.
2194 ??? LHS may not be completely contained in VR, one or more
2195 VIEW_CONVERT_EXPRs could be in its way. We could at least
2196 try handling outermost VIEW_CONVERT_EXPRs. */
2197 if (j != -1)
2198 return (void *)-1;
2199
2200 /* Punt if the additional ops contain a storage order barrier. */
2201 for (k = i; k >= 0; k--)
2202 {
2203 vro = &vr->operands[k];
2204 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2205 return (void *)-1;
2206 }
2207
2208 /* Now re-write REF to be based on the rhs of the assignment. */
2209 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2210
2211 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2212 if (extra_off != 0)
2213 {
2214 if (rhs.length () < 2
2215 || rhs[0].opcode != MEM_REF
2216 || rhs[0].off == -1)
2217 return (void *)-1;
2218 rhs[0].off += extra_off;
2219 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2220 build_int_cst (TREE_TYPE (rhs[0].op0),
2221 extra_off));
2222 }
2223
2224 /* We need to pre-pend vr->operands[0..i] to rhs. */
2225 vec<vn_reference_op_s> old = vr->operands;
2226 if (i + 1 + rhs.length () > vr->operands.length ())
2227 vr->operands.safe_grow (i + 1 + rhs.length ());
2228 else
2229 vr->operands.truncate (i + 1 + rhs.length ());
2230 FOR_EACH_VEC_ELT (rhs, j, vro)
2231 vr->operands[i + 1 + j] = *vro;
2232 vr->operands = valueize_refs (vr->operands);
2233 if (old == shared_lookup_references)
2234 shared_lookup_references = vr->operands;
2235 vr->hashcode = vn_reference_compute_hash (vr);
2236
2237 /* Try folding the new reference to a constant. */
2238 tree val = fully_constant_vn_reference_p (vr);
2239 if (val)
2240 return vn_reference_lookup_or_insert_for_pieces
2241 (vuse, vr->set, vr->type, vr->operands, val);
2242
2243 /* Adjust *ref from the new operands. */
2244 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2245 return (void *)-1;
2246 /* This can happen with bitfields. */
2247 if (ref->size != r.size)
2248 return (void *)-1;
2249 *ref = r;
2250
2251 /* Do not update last seen VUSE after translating. */
2252 last_vuse_ptr = NULL;
2253
2254 /* Keep looking for the adjusted *REF / VR pair. */
2255 return NULL;
2256 }
2257
2258 /* 6) For memcpy copies translate the reference through them if
2259 the copy kills ref. */
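  /* A hypothetical sketch of this case:
       memcpy (&a, &b, sizeof (a));
       ...
       ... = a.f;
     When the copy covers the access, the load is rewritten as a
     MEM_REF based on &b at the corresponding offset and the walk
     continues with the translated reference.  */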
2260 else if (vn_walk_kind == VN_WALKREWRITE
2261 && is_gimple_reg_type (vr->type)
2262 /* ??? Handle BCOPY as well. */
2263 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2264 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2265 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2266 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2267 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2268 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2269 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2270 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2271 {
2272 tree lhs, rhs;
2273 ao_ref r;
2274 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2275 vn_reference_op_s op;
2276 HOST_WIDE_INT at;
2277
2278 /* Only handle non-variable, addressable refs. */
2279 if (ref->size != maxsize
2280 || offset % BITS_PER_UNIT != 0
2281 || ref->size % BITS_PER_UNIT != 0)
2282 return (void *)-1;
2283
2284 /* Extract a pointer base and an offset for the destination. */
2285 lhs = gimple_call_arg (def_stmt, 0);
2286 lhs_offset = 0;
2287 if (TREE_CODE (lhs) == SSA_NAME)
2288 {
2289 lhs = SSA_VAL (lhs);
2290 if (TREE_CODE (lhs) == SSA_NAME)
2291 {
2292 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2293 if (gimple_assign_single_p (def_stmt)
2294 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2295 lhs = gimple_assign_rhs1 (def_stmt);
2296 }
2297 }
2298 if (TREE_CODE (lhs) == ADDR_EXPR)
2299 {
2300 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2301 &lhs_offset);
2302 if (!tem)
2303 return (void *)-1;
2304 if (TREE_CODE (tem) == MEM_REF
2305 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2306 {
2307 lhs = TREE_OPERAND (tem, 0);
2308 if (TREE_CODE (lhs) == SSA_NAME)
2309 lhs = SSA_VAL (lhs);
2310 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2311 }
2312 else if (DECL_P (tem))
2313 lhs = build_fold_addr_expr (tem);
2314 else
2315 return (void *)-1;
2316 }
2317 if (TREE_CODE (lhs) != SSA_NAME
2318 && TREE_CODE (lhs) != ADDR_EXPR)
2319 return (void *)-1;
2320
2321 /* Extract a pointer base and an offset for the source. */
2322 rhs = gimple_call_arg (def_stmt, 1);
2323 rhs_offset = 0;
2324 if (TREE_CODE (rhs) == SSA_NAME)
2325 rhs = SSA_VAL (rhs);
2326 if (TREE_CODE (rhs) == ADDR_EXPR)
2327 {
2328 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2329 &rhs_offset);
2330 if (!tem)
2331 return (void *)-1;
2332 if (TREE_CODE (tem) == MEM_REF
2333 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2334 {
2335 rhs = TREE_OPERAND (tem, 0);
2336 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2337 }
2338 else if (DECL_P (tem)
2339 || TREE_CODE (tem) == STRING_CST)
2340 rhs = build_fold_addr_expr (tem);
2341 else
2342 return (void *)-1;
2343 }
2344 if (TREE_CODE (rhs) != SSA_NAME
2345 && TREE_CODE (rhs) != ADDR_EXPR)
2346 return (void *)-1;
2347
2348 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2349
    /* The bases of the destination and the reference have to agree.  */
2351 if ((TREE_CODE (base) != MEM_REF
2352 && !DECL_P (base))
2353 || (TREE_CODE (base) == MEM_REF
2354 && (TREE_OPERAND (base, 0) != lhs
2355 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2356 || (DECL_P (base)
2357 && (TREE_CODE (lhs) != ADDR_EXPR
2358 || TREE_OPERAND (lhs, 0) != base)))
2359 return (void *)-1;
2360
2361 at = offset / BITS_PER_UNIT;
2362 if (TREE_CODE (base) == MEM_REF)
2363 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2364 /* If the access is completely outside of the memcpy destination
2365 area there is no aliasing. */
2366 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2367 || lhs_offset + copy_size <= at)
2368 return NULL;
2369 /* And the access has to be contained within the memcpy destination. */
2370 if (lhs_offset > at
2371 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2372 return (void *)-1;
2373
2374 /* Make room for 2 operands in the new reference. */
2375 if (vr->operands.length () < 2)
2376 {
2377 vec<vn_reference_op_s> old = vr->operands;
2378 vr->operands.safe_grow_cleared (2);
2379 if (old == shared_lookup_references)
2380 shared_lookup_references = vr->operands;
2381 }
2382 else
2383 vr->operands.truncate (2);
2384
2385 /* The looked-through reference is a simple MEM_REF. */
2386 memset (&op, 0, sizeof (op));
2387 op.type = vr->type;
2388 op.opcode = MEM_REF;
2389 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2390 op.off = at - lhs_offset + rhs_offset;
2391 vr->operands[0] = op;
2392 op.type = TREE_TYPE (rhs);
2393 op.opcode = TREE_CODE (rhs);
2394 op.op0 = rhs;
2395 op.off = -1;
2396 vr->operands[1] = op;
2397 vr->hashcode = vn_reference_compute_hash (vr);
2398
2399 /* Try folding the new reference to a constant. */
2400 tree val = fully_constant_vn_reference_p (vr);
2401 if (val)
2402 return vn_reference_lookup_or_insert_for_pieces
2403 (vuse, vr->set, vr->type, vr->operands, val);
2404
2405 /* Adjust *ref from the new operands. */
2406 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2407 return (void *)-1;
2408 /* This can happen with bitfields. */
2409 if (ref->size != r.size)
2410 return (void *)-1;
2411 *ref = r;
2412
2413 /* Do not update last seen VUSE after translating. */
2414 last_vuse_ptr = NULL;
2415
2416 /* Keep looking for the adjusted *REF / VR pair. */
2417 return NULL;
2418 }
2419
2420 /* Bail out and stop walking. */
2421 return (void *)-1;
2422}
2423
2424/* Return a reference op vector from OP that can be used for
2425 vn_reference_lookup_pieces. The caller is responsible for releasing
2426 the vector. */
2427
2428vec<vn_reference_op_s>
2429vn_reference_operands_for_lookup (tree op)
2430{
2431 bool valueized;
2432 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2433}
2434
/* Lookup a reference operation by its parts, in the current hash table.
2436 Returns the resulting value number if it exists in the hash table,
2437 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2438 vn_reference_t stored in the hashtable if something is found. */
2439
2440tree
2441vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2442 vec<vn_reference_op_s> operands,
2443 vn_reference_t *vnresult, vn_lookup_kind kind)
2444{
2445 struct vn_reference_s vr1;
2446 vn_reference_t tmp;
2447 tree cst;
2448
2449 if (!vnresult)
2450 vnresult = &tmp;
2451 *vnresult = NULL;
2452
2453 vr1.vuse = vuse_ssa_val (vuse);
2454 shared_lookup_references.truncate (0);
2455 shared_lookup_references.safe_grow (operands.length ());
2456 memcpy (shared_lookup_references.address (),
2457 operands.address (),
2458 sizeof (vn_reference_op_s)
2459 * operands.length ());
2460 vr1.operands = operands = shared_lookup_references
2461 = valueize_refs (shared_lookup_references);
2462 vr1.type = type;
2463 vr1.set = set;
2464 vr1.hashcode = vn_reference_compute_hash (&vr1);
2465 if ((cst = fully_constant_vn_reference_p (&vr1)))
2466 return cst;
2467
2468 vn_reference_lookup_1 (&vr1, vnresult);
2469 if (!*vnresult
2470 && kind != VN_NOWALK
2471 && vr1.vuse)
2472 {
2473 ao_ref r;
2474 vn_walk_kind = kind;
2475 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2476 *vnresult =
2477 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2478 vn_reference_lookup_2,
2479 vn_reference_lookup_3,
2480 vuse_ssa_val, &vr1);
2481 gcc_checking_assert (vr1.operands == shared_lookup_references);
2482 }
2483
2484 if (*vnresult)
2485 return (*vnresult)->result;
2486
2487 return NULL_TREE;
2488}
2489
2490/* Lookup OP in the current hash table, and return the resulting value
2491 number if it exists in the hash table. Return NULL_TREE if it does
2492 not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
2494 stored in the hashtable if one exists. When TBAA_P is false assume
2495 we are looking up a store and treat it as having alias-set zero. */
2496
2497tree
2498vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2499 vn_reference_t *vnresult, bool tbaa_p)
2500{
2501 vec<vn_reference_op_s> operands;
2502 struct vn_reference_s vr1;
2503 tree cst;
  bool valueized_anything;
2505
2506 if (vnresult)
2507 *vnresult = NULL;
2508
2509 vr1.vuse = vuse_ssa_val (vuse);
2510 vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2512 vr1.type = TREE_TYPE (op);
2513 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2514 vr1.hashcode = vn_reference_compute_hash (&vr1);
2515 if ((cst = fully_constant_vn_reference_p (&vr1)))
2516 return cst;
2517
2518 if (kind != VN_NOWALK
2519 && vr1.vuse)
2520 {
2521 vn_reference_t wvnresult;
2522 ao_ref r;
2523 /* Make sure to use a valueized reference if we valueized anything.
2524 Otherwise preserve the full reference for advanced TBAA. */
      if (!valueized_anything
2526 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2527 vr1.operands))
2528 ao_ref_init (&r, op);
2529 if (! tbaa_p)
2530 r.ref_alias_set = r.base_alias_set = 0;
2531 vn_walk_kind = kind;
2532 wvnresult =
2533 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2534 vn_reference_lookup_2,
2535 vn_reference_lookup_3,
2536 vuse_ssa_val, &vr1);
2537 gcc_checking_assert (vr1.operands == shared_lookup_references);
2538 if (wvnresult)
2539 {
2540 if (vnresult)
2541 *vnresult = wvnresult;
2542 return wvnresult->result;
2543 }
2544
2545 return NULL_TREE;
2546 }
2547
2548 return vn_reference_lookup_1 (&vr1, vnresult);
2549}
2550
2551/* Lookup CALL in the current hash table and return the entry in
2552 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2553
2554void
2555vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2556 vn_reference_t vr)
2557{
2558 if (vnresult)
2559 *vnresult = NULL;
2560
2561 tree vuse = gimple_vuse (call);
2562
2563 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2564 vr->operands = valueize_shared_reference_ops_from_call (call);
2565 vr->type = gimple_expr_type (call);
2566 vr->set = 0;
2567 vr->hashcode = vn_reference_compute_hash (vr);
2568 vn_reference_lookup_1 (vr, vnresult);
2569}
2570
2571/* Insert OP into the current hash table with a value number of
2572 RESULT, and return the resulting reference structure we created. */
2573
2574static vn_reference_t
2575vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2576{
2577 vn_reference_s **slot;
2578 vn_reference_t vr1;
2579 bool tem;
2580
2581 vr1 = current_info->references_pool->allocate ();
2582 if (TREE_CODE (result) == SSA_NAME)
2583 vr1->value_id = VN_INFO (result)->value_id;
2584 else
2585 vr1->value_id = get_or_alloc_constant_value_id (result);
2586 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2587 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2588 vr1->type = TREE_TYPE (op);
2589 vr1->set = get_alias_set (op);
2590 vr1->hashcode = vn_reference_compute_hash (vr1);
2591 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2592 vr1->result_vdef = vdef;
2593
2594 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2595 INSERT);
2596
  /* Because we look up stores using vuses, and value number failures
2598 using the vdefs (see visit_reference_op_store for how and why),
2599 it's possible that on failure we may try to insert an already
2600 inserted store. This is not wrong, there is no ssa name for a
2601 store that we could use as a differentiator anyway. Thus, unlike
2602 the other lookup functions, you cannot gcc_assert (!*slot)
2603 here. */
2604
2605 /* But free the old slot in case of a collision. */
2606 if (*slot)
2607 free_reference (*slot);
2608
2609 *slot = vr1;
2610 return vr1;
2611}
2612
/* Insert a reference by its pieces into the current hash table with
2614 a value number of RESULT. Return the resulting reference
2615 structure we created. */
2616
2617vn_reference_t
2618vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2619 vec<vn_reference_op_s> operands,
2620 tree result, unsigned int value_id)
2621
2622{
2623 vn_reference_s **slot;
2624 vn_reference_t vr1;
2625
2626 vr1 = current_info->references_pool->allocate ();
2627 vr1->value_id = value_id;
2628 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2629 vr1->operands = valueize_refs (operands);
2630 vr1->type = type;
2631 vr1->set = set;
2632 vr1->hashcode = vn_reference_compute_hash (vr1);
2633 if (result && TREE_CODE (result) == SSA_NAME)
2634 result = SSA_VAL (result);
2635 vr1->result = result;
2636
2637 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2638 INSERT);
2639
2640 /* At this point we should have all the things inserted that we have
2641 seen before, and we should never try inserting something that
2642 already exists. */
2643 gcc_assert (!*slot);
2644 if (*slot)
2645 free_reference (*slot);
2646
2647 *slot = vr1;
2648 return vr1;
2649}
2650
/* Compute and return the hash value for nary operation VNO1.  */
2652
2653static hashval_t
2654vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2655{
2656 inchash::hash hstate;
2657 unsigned i;
2658
2659 for (i = 0; i < vno1->length; ++i)
2660 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2661 vno1->op[i] = SSA_VAL (vno1->op[i]);
2662
2663 if (((vno1->length == 2
2664 && commutative_tree_code (vno1->opcode))
2665 || (vno1->length == 3
2666 && commutative_ternary_tree_code (vno1->opcode)))
2667 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2668 std::swap (vno1->op[0], vno1->op[1]);
2669 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2670 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2671 {
2672 std::swap (vno1->op[0], vno1->op[1]);
2673 vno1->opcode = swap_tree_comparison (vno1->opcode);
2674 }
2675
2676 hstate.add_int (vno1->opcode);
2677 for (i = 0; i < vno1->length; ++i)
2678 inchash::add_expr (vno1->op[i], hstate);
2679
2680 return hstate.end ();
2681}
2682
2683/* Compare nary operations VNO1 and VNO2 and return true if they are
2684 equivalent. */
2685
2686bool
2687vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2688{
2689 unsigned i;
2690
2691 if (vno1->hashcode != vno2->hashcode)
2692 return false;
2693
2694 if (vno1->length != vno2->length)
2695 return false;
2696
2697 if (vno1->opcode != vno2->opcode
2698 || !types_compatible_p (vno1->type, vno2->type))
2699 return false;
2700
2701 for (i = 0; i < vno1->length; ++i)
2702 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2703 return false;
2704
  /* BIT_INSERT_EXPR has an implicit operand as the type precision
2706 of op1. Need to check to make sure they are the same. */
2707 if (vno1->opcode == BIT_INSERT_EXPR
2708 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2709 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2710 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2711 return false;
2712
2713 return true;
2714}
2715
2716/* Initialize VNO from the pieces provided. */
2717
2718static void
2719init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2720 enum tree_code code, tree type, tree *ops)
2721{
2722 vno->opcode = code;
2723 vno->length = length;
2724 vno->type = type;
2725 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2726}
2727
2728/* Initialize VNO from OP. */
2729
2730static void
2731init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2732{
2733 unsigned i;
2734
2735 vno->opcode = TREE_CODE (op);
2736 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2737 vno->type = TREE_TYPE (op);
2738 for (i = 0; i < vno->length; ++i)
2739 vno->op[i] = TREE_OPERAND (op, i);
2740}
2741
2742/* Return the number of operands for a vn_nary ops structure from STMT. */
2743
2744static unsigned int
2745vn_nary_length_from_stmt (gimple *stmt)
2746{
2747 switch (gimple_assign_rhs_code (stmt))
2748 {
2749 case REALPART_EXPR:
2750 case IMAGPART_EXPR:
2751 case VIEW_CONVERT_EXPR:
2752 return 1;
2753
2754 case BIT_FIELD_REF:
2755 return 3;
2756
2757 case CONSTRUCTOR:
2758 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2759
2760 default:
2761 return gimple_num_ops (stmt) - 1;
2762 }
2763}
2764
2765/* Initialize VNO from STMT. */
2766
2767static void
2768init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2769{
2770 unsigned i;
2771
2772 vno->opcode = gimple_assign_rhs_code (stmt);
2773 vno->type = gimple_expr_type (stmt);
2774 switch (vno->opcode)
2775 {
2776 case REALPART_EXPR:
2777 case IMAGPART_EXPR:
2778 case VIEW_CONVERT_EXPR:
2779 vno->length = 1;
2780 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2781 break;
2782
2783 case BIT_FIELD_REF:
2784 vno->length = 3;
2785 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2786 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2787 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2788 break;
2789
2790 case CONSTRUCTOR:
2791 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2792 for (i = 0; i < vno->length; ++i)
2793 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2794 break;
2795
2796 default:
2797 gcc_checking_assert (!gimple_assign_single_p (stmt));
2798 vno->length = gimple_num_ops (stmt) - 1;
2799 for (i = 0; i < vno->length; ++i)
2800 vno->op[i] = gimple_op (stmt, i + 1);
2801 }
2802}
2803
2804/* Compute the hashcode for VNO and look for it in the hash table;
2805 return the resulting value number if it exists in the hash table.
2806 Return NULL_TREE if it does not exist in the hash table or if the
2807 result field of the operation is NULL. VNRESULT will contain the
2808 vn_nary_op_t from the hashtable if it exists. */
2809
2810static tree
2811vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2812{
2813 vn_nary_op_s **slot;
2814
2815 if (vnresult)
2816 *vnresult = NULL;
2817
2818 vno->hashcode = vn_nary_op_compute_hash (vno);
2819 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2820 NO_INSERT);
2821 if (!slot && current_info == optimistic_info)
2822 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2823 NO_INSERT);
2824 if (!slot)
2825 return NULL_TREE;
2826 if (vnresult)
2827 *vnresult = *slot;
2828 return (*slot)->result;
2829}
2830
2831/* Lookup a n-ary operation by its pieces and return the resulting value
2832 number if it exists in the hash table. Return NULL_TREE if it does
2833 not exist in the hash table or if the result field of the operation
2834 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2835 if it exists. */
2836
2837tree
2838vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2839 tree type, tree *ops, vn_nary_op_t *vnresult)
2840{
2841 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2842 sizeof_vn_nary_op (length));
2843 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2844 return vn_nary_op_lookup_1 (vno1, vnresult);
2845}
2846
2847/* Lookup OP in the current hash table, and return the resulting value
2848 number if it exists in the hash table. Return NULL_TREE if it does
2849 not exist in the hash table or if the result field of the operation
2850 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2851 if it exists. */
2852
2853tree
2854vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2855{
2856 vn_nary_op_t vno1
2857 = XALLOCAVAR (struct vn_nary_op_s,
2858 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2859 init_vn_nary_op_from_op (vno1, op);
2860 return vn_nary_op_lookup_1 (vno1, vnresult);
2861}
2862
2863/* Lookup the rhs of STMT in the current hash table, and return the resulting
2864 value number if it exists in the hash table. Return NULL_TREE if
2865 it does not exist in the hash table. VNRESULT will contain the
2866 vn_nary_op_t from the hashtable if it exists. */
2867
2868tree
2869vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2870{
2871 vn_nary_op_t vno1
2872 = XALLOCAVAR (struct vn_nary_op_s,
2873 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2874 init_vn_nary_op_from_stmt (vno1, stmt);
2875 return vn_nary_op_lookup_1 (vno1, vnresult);
2876}
2877
2878/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2879
2880static vn_nary_op_t
2881alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2882{
2883 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2884}
2885
2886/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2887 obstack. */
2888
2889static vn_nary_op_t
2890alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2891{
2892 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2893 &current_info->nary_obstack);
2894
2895 vno1->value_id = value_id;
2896 vno1->length = length;
2897 vno1->result = result;
2898
2899 return vno1;
2900}
2901
2902/* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2903 VNO->HASHCODE first. */
2904
2905static vn_nary_op_t
2906vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2907 bool compute_hash)
2908{
2909 vn_nary_op_s **slot;
2910
2911 if (compute_hash)
2912 vno->hashcode = vn_nary_op_compute_hash (vno);
2913
2914 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2915 /* While we do not want to insert things twice it's awkward to
2916 avoid it in the case where visit_nary_op pattern-matches stuff
2917 and ends up simplifying the replacement to itself. We then
2918 get two inserts, one from visit_nary_op and one from
2919 vn_nary_build_or_lookup.
2920 So allow inserts with the same value number. */
2921 if (*slot && (*slot)->result == vno->result)
2922 return *slot;
2923
2924 gcc_assert (!*slot);
2925
2926 *slot = vno;
2927 return vno;
2928}
2929
/* Insert an n-ary operation into the current hash table using its
2931 pieces. Return the vn_nary_op_t structure we created and put in
2932 the hashtable. */
2933
2934vn_nary_op_t
2935vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2936 tree type, tree *ops,
2937 tree result, unsigned int value_id)
2938{
2939 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2940 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2941 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2942}
2943
2944/* Insert OP into the current hash table with a value number of
2945 RESULT. Return the vn_nary_op_t structure we created and put in
2946 the hashtable. */
2947
2948vn_nary_op_t
2949vn_nary_op_insert (tree op, tree result)
2950{
2951 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2952 vn_nary_op_t vno1;
2953
2954 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2955 init_vn_nary_op_from_op (vno1, op);
2956 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2957}
2958
2959/* Insert the rhs of STMT into the current hash table with a value number of
2960 RESULT. */
2961
2962static vn_nary_op_t
2963vn_nary_op_insert_stmt (gimple *stmt, tree result)
2964{
2965 vn_nary_op_t vno1
2966 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2967 result, VN_INFO (result)->value_id);
2968 init_vn_nary_op_from_stmt (vno1, stmt);
2969 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2970}
2971
2972/* Compute a hashcode for PHI operation VP1 and return it. */
2973
2974static inline hashval_t
2975vn_phi_compute_hash (vn_phi_t vp1)
2976{
2977 inchash::hash hstate (vp1->phiargs.length () > 2
2978 ? vp1->block->index : vp1->phiargs.length ());
2979 tree phi1op;
2980 tree type;
2981 edge e;
2982 edge_iterator ei;
2983
2984 /* If all PHI arguments are constants we need to distinguish
2985 the PHI node via its type. */
2986 type = vp1->type;
2987 hstate.merge_hash (vn_hash_type (type));
2988
2989 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2990 {
      /* Don't hash backedge values; they need to be handled as VN_TOP
2992 for optimistic value-numbering. */
2993 if (e->flags & EDGE_DFS_BACK)
2994 continue;
2995
2996 phi1op = vp1->phiargs[e->dest_idx];
2997 if (phi1op == VN_TOP)
2998 continue;
2999 inchash::add_expr (phi1op, hstate);
3000 }
3001
3002 return hstate.end ();
3003}
3004
3005
3006/* Return true if COND1 and COND2 represent the same condition, set
3007 *INVERTED_P if one needs to be inverted to make it the same as
3008 the other. */
3009
3010static bool
3011cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3012 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3013{
3014 enum tree_code code1 = gimple_cond_code (cond1);
3015 enum tree_code code2 = gimple_cond_code (cond2);
3016
3017 *inverted_p = false;
3018 if (code1 == code2)
3019 ;
3020 else if (code1 == swap_tree_comparison (code2))
3021 std::swap (lhs2, rhs2);
3022 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3023 *inverted_p = true;
3024 else if (code1 == invert_tree_comparison
3025 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3026 {
3027 std::swap (lhs2, rhs2);
3028 *inverted_p = true;
3029 }
3030 else
3031 return false;
3032
3033 return ((expressions_equal_p (lhs1, lhs2)
3034 && expressions_equal_p (rhs1, rhs2))
3035 || (commutative_tree_code (code1)
3036 && expressions_equal_p (lhs1, rhs2)
3037 && expressions_equal_p (rhs1, lhs2)));
3038}
3039
3040/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3041
3042static int
3043vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3044{
3045 if (vp1->hashcode != vp2->hashcode)
3046 return false;
3047
3048 if (vp1->block != vp2->block)
3049 {
3050 if (vp1->phiargs.length () != vp2->phiargs.length ())
3051 return false;
3052
3053 switch (vp1->phiargs.length ())
3054 {
3055 case 1:
3056 /* Single-arg PHIs are just copies. */
3057 break;
3058
3059 case 2:
3060 {
3061 /* Rule out backedges into the PHI. */
3062 if (vp1->block->loop_father->header == vp1->block
3063 || vp2->block->loop_father->header == vp2->block)
3064 return false;
3065
3066 /* If the PHI nodes do not have compatible types
3067 they are not the same. */
3068 if (!types_compatible_p (vp1->type, vp2->type))
3069 return false;
3070
3071 basic_block idom1
3072 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3073 basic_block idom2
3074 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts, multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
3078 if (EDGE_COUNT (idom1->succs) != 2
3079 || EDGE_COUNT (idom2->succs) != 2)
3080 return false;
3081
3082 /* Verify the controlling stmt is the same. */
3083 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3084 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3085 if (! last1 || ! last2)
3086 return false;
3087 bool inverted_p;
3088 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3089 last2, vp2->cclhs, vp2->ccrhs,
3090 &inverted_p))
3091 return false;
3092
3093 /* Get at true/false controlled edges into the PHI. */
3094 edge te1, te2, fe1, fe2;
3095 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3096 &te1, &fe1)
3097 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3098 &te2, &fe2))
3099 return false;
3100
3101 /* Swap edges if the second condition is the inverted of the
3102 first. */
3103 if (inverted_p)
3104 std::swap (te2, fe2);
3105
3106 /* ??? Handle VN_TOP specially. */
3107 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3108 vp2->phiargs[te2->dest_idx])
3109 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3110 vp2->phiargs[fe2->dest_idx]))
3111 return false;
3112
3113 return true;
3114 }
3115
3116 default:
3117 return false;
3118 }
3119 }
3120
3121 /* If the PHI nodes do not have compatible types
3122 they are not the same. */
3123 if (!types_compatible_p (vp1->type, vp2->type))
3124 return false;
3125
  /* Any phi in the same block will have its arguments in the
3127 same edge order, because of how we store phi nodes. */
3128 int i;
3129 tree phi1op;
3130 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3131 {
3132 tree phi2op = vp2->phiargs[i];
3133 if (phi1op == VN_TOP || phi2op == VN_TOP)
3134 continue;
3135 if (!expressions_equal_p (phi1op, phi2op))
3136 return false;
3137 }
3138
3139 return true;
3140}
3141
3142static vec<tree> shared_lookup_phiargs;
3143
3144/* Lookup PHI in the current hash table, and return the resulting
3145 value number if it exists in the hash table. Return NULL_TREE if
3146 it does not exist in the hash table. */
3147
3148static tree
3149vn_phi_lookup (gimple *phi)
3150{
3151 vn_phi_s **slot;
3152 struct vn_phi_s vp1;
3153 edge e;
3154 edge_iterator ei;
3155
3156 shared_lookup_phiargs.truncate (0);
3157 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3158
3159 /* Canonicalize the SSA_NAME's to their value number. */
3160 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3161 {
3162 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3163 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3164 shared_lookup_phiargs[e->dest_idx] = def;
3165 }
3166 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3167 vp1.phiargs = shared_lookup_phiargs;
3168 vp1.block = gimple_bb (phi);
3169 /* Extract values of the controlling condition. */
3170 vp1.cclhs = NULL_TREE;
3171 vp1.ccrhs = NULL_TREE;
3172 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3173 if (EDGE_COUNT (idom1->succs) == 2)
3174 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3175 {
3176 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3177 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3178 }
3179 vp1.hashcode = vn_phi_compute_hash (&vp1);
3180 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3181 NO_INSERT);
3182 if (!slot && current_info == optimistic_info)
3183 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3184 NO_INSERT);
3185 if (!slot)
3186 return NULL_TREE;
3187 return (*slot)->result;
3188}
3189
3190/* Insert PHI into the current hash table with a value number of
3191 RESULT. */
3192
3193static vn_phi_t
3194vn_phi_insert (gimple *phi, tree result)
3195{
3196 vn_phi_s **slot;
3197 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3198 vec<tree> args = vNULL;
3199 edge e;
3200 edge_iterator ei;
3201
3202 args.safe_grow (gimple_phi_num_args (phi));
3203
3204 /* Canonicalize the SSA_NAME's to their value number. */
3205 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3206 {
3207 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3208 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3209 args[e->dest_idx] = def;
3210 }
3211 vp1->value_id = VN_INFO (result)->value_id;
3212 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3213 vp1->phiargs = args;
3214 vp1->block = gimple_bb (phi);
3215 /* Extract values of the controlling condition. */
3216 vp1->cclhs = NULL_TREE;
3217 vp1->ccrhs = NULL_TREE;
3218 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3219 if (EDGE_COUNT (idom1->succs) == 2)
3220 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3221 {
3222 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3223 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3224 }
3225 vp1->result = result;
3226 vp1->hashcode = vn_phi_compute_hash (vp1);
3227
3228 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3229
3230 /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
3232 *slot = vp1;
3233 return vp1;
3234}
3235
3236
3237/* Print set of components in strongly connected component SCC to OUT. */
3238
3239static void
3240print_scc (FILE *out, vec<tree> scc)
3241{
3242 tree var;
3243 unsigned int i;
3244
3245 fprintf (out, "SCC consists of %u:", scc.length ());
3246 FOR_EACH_VEC_ELT (scc, i, var)
3247 {
3248 fprintf (out, " ");
3249 print_generic_expr (out, var);
3250 }
3251 fprintf (out, "\n");
3252}
3253
3254/* Return true if BB1 is dominated by BB2 taking into account edges
3255 that are not executable. */
3256
3257static bool
3258dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3259{
3260 edge_iterator ei;
3261 edge e;
3262
3263 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3264 return true;
3265
  /* Before iterating we'd like to know if there exists an
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */
3269
3270 /* Iterate to the single executable bb1 predecessor. */
3271 if (EDGE_COUNT (bb1->preds) > 1)
3272 {
3273 edge prede = NULL;
3274 FOR_EACH_EDGE (e, ei, bb1->preds)
3275 if (e->flags & EDGE_EXECUTABLE)
3276 {
3277 if (prede)
3278 {
3279 prede = NULL;
3280 break;
3281 }
3282 prede = e;
3283 }
3284 if (prede)
3285 {
3286 bb1 = prede->src;
3287
3288 /* Re-do the dominance check with changed bb1. */
3289 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3290 return true;
3291 }
3292 }
3293
3294 /* Iterate to the single executable bb2 successor. */
3295 edge succe = NULL;
3296 FOR_EACH_EDGE (e, ei, bb2->succs)
3297 if (e->flags & EDGE_EXECUTABLE)
3298 {
3299 if (succe)
3300 {
3301 succe = NULL;
3302 break;
3303 }
3304 succe = e;
3305 }
3306 if (succe)
3307 {
3308 /* Verify the reached block is only reached through succe.
3309 If there is only one edge we can spare us the dominator
3310 check and iterate directly. */
3311 if (EDGE_COUNT (succe->dest->preds) > 1)
3312 {
3313 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3314 if (e != succe
3315 && (e->flags & EDGE_EXECUTABLE))
3316 {
3317 succe = NULL;
3318 break;
3319 }
3320 }
3321 if (succe)
3322 {
3323 bb2 = succe->dest;
3324
3325 /* Re-do the dominance check with changed bb2. */
3326 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3327 return true;
3328 }
3329 }
3330
3331 /* We could now iterate updating bb1 / bb2. */
3332 return false;
3333}
3334
3335/* Set the value number of FROM to TO, return true if it has changed
3336 as a result. */
3337
3338static inline bool
3339set_ssa_val_to (tree from, tree to)
3340{
3341 tree currval = SSA_VAL (from);
3342 HOST_WIDE_INT toff, coff;
3343
3344 /* The only thing we allow as value numbers are ssa_names
3345 and invariants. So assert that here. We don't allow VN_TOP
3346 as visiting a stmt should produce a value-number other than
3347 that.
3348 ??? Still VN_TOP can happen for unreachable code, so force
3349 it to varying in that case. Not all code is prepared to
3350 get VN_TOP on valueization. */
3351 if (to == VN_TOP)
3352 {
3353 if (dump_file && (dump_flags & TDF_DETAILS))
3354 fprintf (dump_file, "Forcing value number to varying on "
3355 "receiving VN_TOP\n");
3356 to = from;
3357 }
3358
3359 gcc_assert (to != NULL_TREE
3360 && ((TREE_CODE (to) == SSA_NAME
3361 && (to == from || SSA_VAL (to) == to))
3362 || is_gimple_min_invariant (to)));
3363
3364 if (from != to)
3365 {
3366 if (currval == from)
3367 {
3368 if (dump_file && (dump_flags & TDF_DETAILS))
3369 {
3370 fprintf (dump_file, "Not changing value number of ");
3371 print_generic_expr (dump_file, from);
3372 fprintf (dump_file, " from VARYING to ");
3373 print_generic_expr (dump_file, to);
3374 fprintf (dump_file, "\n");
3375 }
3376 return false;
3377 }
3378 else if (currval != VN_TOP
3379 && ! is_gimple_min_invariant (currval)
3380 && is_gimple_min_invariant (to))
3381 {
3382 if (dump_file && (dump_flags & TDF_DETAILS))
3383 {
3384 fprintf (dump_file, "Forcing VARYING instead of changing "
3385 "value number of ");
3386 print_generic_expr (dump_file, from);
3387 fprintf (dump_file, " from ");
3388 print_generic_expr (dump_file, currval);
3389 fprintf (dump_file, " (non-constant) to ");
3390 print_generic_expr (dump_file, to);
3391 fprintf (dump_file, " (constant)\n");
3392 }
3393 to = from;
3394 }
3395 else if (TREE_CODE (to) == SSA_NAME
3396 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3397 to = from;
3398 }
3399
3400 if (dump_file && (dump_flags & TDF_DETAILS))
3401 {
3402 fprintf (dump_file, "Setting value number of ");
3403 print_generic_expr (dump_file, from);
3404 fprintf (dump_file, " to ");
3405 print_generic_expr (dump_file, to);
3406 }
3407
3408 if (currval != to
3409 && !operand_equal_p (currval, to, 0)
3410 /* Different undefined SSA names are not actually different. See
	 PR82320 for a testcase where we'd otherwise not terminate iteration.  */
3412 && !(TREE_CODE (currval) == SSA_NAME
3413 && TREE_CODE (to) == SSA_NAME
3414 && ssa_undefined_value_p (currval, false)
3415 && ssa_undefined_value_p (to, false))
3416 /* ??? For addresses involving volatile objects or types operand_equal_p
3417 does not reliably detect ADDR_EXPRs as equal. We know we are only
	 getting invariant gimple addresses here, so we can use
3419 get_addr_base_and_unit_offset to do this comparison. */
3420 && !(TREE_CODE (currval) == ADDR_EXPR
3421 && TREE_CODE (to) == ADDR_EXPR
3422 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3423 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3424 && coff == toff))
3425 {
3426 if (dump_file && (dump_flags & TDF_DETAILS))
3427 fprintf (dump_file, " (changed)\n");
3428
3429 /* If we equate two SSA names we have to make the side-band info
3430 of the leader conservative (and remember whatever original value
3431 was present). */
3432 if (TREE_CODE (to) == SSA_NAME)
3433 {
3434 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3435 && SSA_NAME_RANGE_INFO (to))
3436 {
3437 if (SSA_NAME_IS_DEFAULT_DEF (to)
3438 || dominated_by_p_w_unex
3439 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3440 gimple_bb (SSA_NAME_DEF_STMT (to))))
3441 /* Keep the info from the dominator. */
3442 ;
3443 else
3444 {
3445 /* Save old info. */
3446 if (! VN_INFO (to)->info.range_info)
3447 {
3448 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3449 VN_INFO (to)->range_info_anti_range_p
3450 = SSA_NAME_ANTI_RANGE_P (to);
3451 }
3452 /* Rather than allocating memory and unioning the info
3453 just clear it. */
3454 if (dump_file && (dump_flags & TDF_DETAILS))
3455 {
3456 fprintf (dump_file, "clearing range info of ");
3457 print_generic_expr (dump_file, to);
3458 fprintf (dump_file, "\n");
3459 }
3460 SSA_NAME_RANGE_INFO (to) = NULL;
3461 }
3462 }
3463 else if (POINTER_TYPE_P (TREE_TYPE (to))
3464 && SSA_NAME_PTR_INFO (to))
3465 {
3466 if (SSA_NAME_IS_DEFAULT_DEF (to)
3467 || dominated_by_p_w_unex
3468 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3469 gimple_bb (SSA_NAME_DEF_STMT (to))))
3470 /* Keep the info from the dominator. */
3471 ;
3472 else if (! SSA_NAME_PTR_INFO (from)
3473 /* Handle the case of trivially equivalent info. */
3474 || memcmp (SSA_NAME_PTR_INFO (to),
3475 SSA_NAME_PTR_INFO (from),
3476 sizeof (ptr_info_def)) != 0)
3477 {
3478 /* Save old info. */
3479 if (! VN_INFO (to)->info.ptr_info)
3480 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3481 /* Rather than allocating memory and unioning the info
3482 just clear it. */
3483 if (dump_file && (dump_flags & TDF_DETAILS))
3484 {
3485 fprintf (dump_file, "clearing points-to info of ");
3486 print_generic_expr (dump_file, to);
3487 fprintf (dump_file, "\n");
3488 }
3489 SSA_NAME_PTR_INFO (to) = NULL;
3490 }
3491 }
3492 }
3493
3494 VN_INFO (from)->valnum = to;
3495 return true;
3496 }
3497 if (dump_file && (dump_flags & TDF_DETAILS))
3498 fprintf (dump_file, "\n");
3499 return false;
3500}
3501
3502/* Mark as processed all the definitions in the defining stmt of USE, or
3503 the USE itself. */
3504
3505static void
3506mark_use_processed (tree use)
3507{
3508 ssa_op_iter iter;
3509 def_operand_p defp;
3510 gimple *stmt = SSA_NAME_DEF_STMT (use);
3511
3512 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3513 {
3514 VN_INFO (use)->use_processed = true;
3515 return;
3516 }
3517
3518 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3519 {
3520 tree def = DEF_FROM_PTR (defp);
3521
3522 VN_INFO (def)->use_processed = true;
3523 }
3524}
3525
3526/* Set all definitions in STMT to value number to themselves.
3527 Return true if a value number changed. */
3528
3529static bool
3530defs_to_varying (gimple *stmt)
3531{
3532 bool changed = false;
3533 ssa_op_iter iter;
3534 def_operand_p defp;
3535
3536 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3537 {
3538 tree def = DEF_FROM_PTR (defp);
3539 changed |= set_ssa_val_to (def, def);
3540 }
3541 return changed;
3542}
3543
3544/* Visit a copy between LHS and RHS, return true if the value number
3545 changed. */
3546
3547static bool
3548visit_copy (tree lhs, tree rhs)
3549{
3550 /* Valueize. */
3551 rhs = SSA_VAL (rhs);
3552
3553 return set_ssa_val_to (lhs, rhs);
3554}
3555
/* Lookup a value for OP in type WIDE_TYPE where the value in the type
   of OP is the same.  */
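/* For example (a hypothetical sketch), with WIDE_TYPE == int and OP
   defined as
     short _1 = (short) x_2;
   where x_2 has type int, the truncation is looked through and the
   (valueized) x_2 is returned.  Alternatively an already available
   widening of OP is looked up, or a constant OP is simply extended.  */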
3558
3559static tree
3560valueized_wider_op (tree wide_type, tree op)
3561{
3562 if (TREE_CODE (op) == SSA_NAME)
3563 op = SSA_VAL (op);
3564
3565 /* Either we have the op widened available. */
3566 tree ops[3] = {};
3567 ops[0] = op;
3568 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3569 wide_type, ops, NULL);
3570 if (tem)
3571 return tem;
3572
3573 /* Or the op is truncated from some existing value. */
3574 if (TREE_CODE (op) == SSA_NAME)
3575 {
3576 gimple *def = SSA_NAME_DEF_STMT (op);
3577 if (is_gimple_assign (def)
3578 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3579 {
3580 tem = gimple_assign_rhs1 (def);
3581 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3582 {
3583 if (TREE_CODE (tem) == SSA_NAME)
3584 tem = SSA_VAL (tem);
3585 return tem;
3586 }
3587 }
3588 }
3589
3590 /* For constants simply extend it. */
3591 if (TREE_CODE (op) == INTEGER_CST)
3592 return wide_int_to_tree (wide_type, wi::to_wide (op));
3593
3594 return NULL_TREE;
3595}
3596
3597/* Visit a nary operator RHS, value number it, and return true if the
3598 value number of LHS has changed as a result. */
3599
3600static bool
3601visit_nary_op (tree lhs, gassign *stmt)
3602{
3603 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3604 if (result)
3605 return set_ssa_val_to (lhs, result);
3606
3607 /* Do some special pattern matching for redundancies of operations
3608 in different types. */
3609 enum tree_code code = gimple_assign_rhs_code (stmt);
3610 tree type = TREE_TYPE (lhs);
3611 tree rhs1 = gimple_assign_rhs1 (stmt);
3612 switch (code)
3613 {
3614 CASE_CONVERT:
3615 /* Match arithmetic done in a different type where we can easily
3616 substitute the result from some earlier sign-changed or widened
3617 operation. */
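      /* A hypothetical sketch of the matched pattern:
	   unsigned short _1 = a_2 + b_3;
	   int _4 = (int) _1;
	 If the addition is also available computed in int, say as
	   int _5 = (int) a_2 + (int) b_3;
	 then _4 can be valued as _5 & 0xffff (and, for a pure
	 sign-change conversion, as _5 directly).  */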
3618 if (INTEGRAL_TYPE_P (type)
3619 && TREE_CODE (rhs1) == SSA_NAME
3620 /* We only handle sign-changes or zero-extension -> & mask. */
3621 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3622 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3623 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3624 {
3625 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3626 if (def
3627 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3628 || gimple_assign_rhs_code (def) == MINUS_EXPR
3629 || gimple_assign_rhs_code (def) == MULT_EXPR))
3630 {
3631 tree ops[3] = {};
3632 /* Either we have the op widened available. */
3633 ops[0] = valueized_wider_op (type,
3634 gimple_assign_rhs1 (def));
3635 if (ops[0])
3636 ops[1] = valueized_wider_op (type,
3637 gimple_assign_rhs2 (def));
3638 if (ops[0] && ops[1])
3639 {
3640 ops[0] = vn_nary_op_lookup_pieces
3641 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3642 /* We have wider operation available. */
3643 if (ops[0])
3644 {
3645 unsigned lhs_prec = TYPE_PRECISION (type);
3646 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3647 if (lhs_prec == rhs_prec)
3648 {
3649 ops[1] = NULL_TREE;
3650 result = vn_nary_build_or_lookup (NOP_EXPR,
3651 type, ops);
3652 if (result)
3653 {
3654 bool changed = set_ssa_val_to (lhs, result);
3655 vn_nary_op_insert_stmt (stmt, result);
3656 return changed;
3657 }
3658 }
3659 else
3660 {
                          ops[1] = wide_int_to_tree (type,
                                                     wi::mask (rhs_prec, false,
                                                               lhs_prec));
                          result = vn_nary_build_or_lookup (BIT_AND_EXPR,
                                                            TREE_TYPE (lhs),
                                                            ops);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                    }
                }
            }
        }
    default:;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-SSA LHS is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
        changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
        /* If the call was discovered to be pure or const, reflect
           that as far as possible.  */
        changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
        vnresult->result = lhs;

      if (vnresult->result && lhs)
        changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
        {
          /* If we value numbered the function of an indirect call to
             one not clobbering memory, value number its VDEF to its
             VUSE.  */
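          /* For example (made-up SSA names), if fn_1 in

               # .MEM_4 = VDEF <.MEM_3>
               x_5 = fn_1 (a_2);

             is found to have the value &foo for some const or pure
             function foo, the call does not clobber memory and .MEM_4
             can be given the value of .MEM_3, letting later loads look
             through the call.  */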
          tree fn = gimple_call_fn (stmt);
          if (fn && TREE_CODE (fn) == SSA_NAME)
            {
              fn = SSA_VAL (fn);
              if (TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
                      & (ECF_CONST | ECF_PURE)))
                vdef_val = vuse_ssa_val (gimple_vuse (stmt));
            }
          changed |= set_ssa_val_to (vdef, vdef_val);
        }
      if (lhs)
        changed |= set_ssa_val_to (lhs, lhs);
      vr2 = current_info->references_pool->allocate ();
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
         shared_lookup_references are still original, so we can re-use
         them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
                                                            INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator OP, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
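  /* Sketch of the idea behind LAST_VUSE: the alias walk done by the
     lookup below records the oldest VUSE it reaches at which the
     loaded value is still the same; if the lookup fails, the load is
     inserted with that VUSE so the new table entry covers a wider
     range of memory states.  */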
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
                                default_vn_walk_kind, NULL, true);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
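  /* For example (a made-up case), with

       union { int i; float f; } u;
       u.i = 0;
       tem_1 = u.f;

     the load of u.f can find the stored value based on offset and size
     alone; the type mismatch is then reconciled by value numbering
     tem_1 as VIEW_CONVERT_EXPR <float> of that value.  */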
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
         So first simplify and look up this expression to see if it
         is already available.  */
      code_helper rcode = VIEW_CONVERT_EXPR;
      tree ops[3] = { result };
      result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}


/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to look up using the *vuses* from the store and see
     whether the last store to this location with the same address had
     the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store are the same as those
     of the last store to this location, then this store will produce
     the same memory state as that store.

     In this case the vdef versions for this store are value numbered
     to those vuse versions, since they represent the same memory state
     after this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
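  /* A sketch of the redundant-store case (made-up SSA names):

       # .MEM_3 = VDEF <.MEM_2>
       a.x = x_1;
       # .MEM_4 = VDEF <.MEM_3>
       a.x = x_1;

     The second store leaves memory unchanged, so its VDEF .MEM_4 is
     given the value of its VUSE .MEM_3 (provided the alias sets are
     compatible, see below).  */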
  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
        {
          /* If the TBAA state isn't compatible for downstream reads,
             we cannot value-number the VDEFs the same.  */
          alias_set_type set = get_alias_set (lhs);
          if (vnresult->set != set
              && ! alias_set_subset_of (set, vnresult->set))
            resultsame = false;
        }
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE,
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
          if (vnresult)
            {
              VN_INFO (vdef)->use_processed = true;
              return set_ssa_val_to (vdef, vnresult->result_vdef);
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE,
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_insert (assign, lhs, vuse, vdef);
        }
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }
