1 | /* SSA operands management for trees. |
2 | Copyright (C) 2003-2024 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify |
7 | it under the terms of the GNU General Public License as published by |
8 | the Free Software Foundation; either version 3, or (at your option) |
9 | any later version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
14 | GNU General Public License for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "tree.h" |
25 | #include "gimple.h" |
26 | #include "timevar.h" |
27 | #include "ssa.h" |
28 | #include "gimple-pretty-print.h" |
29 | #include "diagnostic-core.h" |
30 | #include "stmt.h" |
31 | #include "print-tree.h" |
32 | #include "dumpfile.h" |
33 | |
34 | |
35 | /* This file contains the code required to manage the operands cache of the |
36 | SSA optimizer. For every stmt, we maintain an operand cache in the stmt |
37 | annotation. This cache contains operands that will be of interest to |
38 | optimizers and other passes wishing to manipulate the IL. |
39 | |
40 | The operand types are broken up into REAL and VIRTUAL operands. The real |
41 | operands are represented as pointers into the stmt's operand tree. Thus |
42 | any manipulation of the real operands will be reflected in the actual tree. |
43 | Virtual operands are represented solely in the cache, although the base |
44 | variable for the SSA_NAME may or may not occur in the stmt's tree. |
45 | Manipulation of the virtual operands will not be reflected in the stmt tree. |
46 | |
47 | The routines in this file are concerned with creating this operand cache |
48 | from a stmt tree. |
49 | |
50 | The operand tree is then parsed by the various get_* routines which look |
51 | through the stmt tree for the occurrence of operands which may be of |
52 | interest, and calls are made to the append_* routines whenever one is |
53 | found. There are 4 of these routines, each representing one of the |
54 | 4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs. |
55 | |
56 | The append_* routines check for duplication, and simply keep a list of |
57 | unique objects for each operand type in the build_* extendable vectors. |
58 | |
59 | Once the stmt tree is completely parsed, the finalize_ssa_operands() |
60 | routine is called, which proceeds to perform the finalization routine |
61 | on each of the 4 operand vectors which have been built up. |
62 | |
63 | If the stmt had a previous operand cache, the finalization routines |
64 | attempt to match up the new operands with the old ones. If it's a perfect |
65 | match, the old vector is simply reused. If it isn't a perfect match, then |
66 | a new vector is created and the new operands are placed there. For |
67 | virtual operands, if the previous cache had an SSA_NAME version of a |
68 | variable, and that same variable occurs in the new operand cache, then |
69 | the new cache vector will also get the same SSA_NAME. |
70 | |
71 | i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new |
72 | operand vector for VUSE, then the new vector will also be modified |
73 | such that it contains 'a_5' rather than 'a'. */ |
74 | |
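/* As a rough illustration of how the cache built below is consumed (the
   iteration macros live in ssa-iterators.h, not in this file), a pass that
   has rewritten a statement typically re-scans it and then walks the cache:

       update_stmt (stmt);
       tree op;
       ssa_op_iter iter;
       FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
         process_use (op);

   where process_use () is only a placeholder for whatever the pass does
   with each real USE operand.  This sketch shows intent, not an API that
   is defined in this file.  */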
75 | |
76 | /* Flags to describe operand properties in helpers. */ |
77 | |
78 | /* By default, operands are loaded. */ |
79 | #define opf_use 0 |
80 | |
81 | /* Operand is the target of an assignment expression or a |
82 | call-clobbered variable. */ |
83 | #define opf_def (1 << 0) |
84 | |
85 | /* No virtual operands should be created in the expression. This is used |
86 | when traversing ADDR_EXPR nodes which have different semantics than |
87 | other expressions. Inside an ADDR_EXPR node, the only operands that we |
88 | need to consider are indices into arrays. For instance, &a.b[i] should |
89 | generate a USE of 'i' but it should not generate a VUSE for 'a' nor a |
90 | VUSE for 'b'. */ |
91 | #define opf_no_vops (1 << 1) |
92 | |
93 | /* Operand is in a place where address-taken does not imply addressable. */ |
94 | #define opf_non_addressable (1 << 3) |
95 | |
96 | /* Operand is in a place where opf_non_addressable does not apply. */ |
97 | #define opf_not_non_addressable (1 << 4) |
98 | |
99 | /* Operand is having its address taken. */ |
100 | #define opf_address_taken (1 << 5) |
101 | |
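/* These flags are OR'ed together while walking an expression.  For example,
   when get_mem_ref_operands below scans the base pointer of a MEM_REF it
   passes opf_non_addressable | opf_use together with any inherited
   opf_no_vops and opf_not_non_addressable bits, so the base is recorded as
   a plain use without being forced addressable.  */
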
102 | /* Class containing temporary per-stmt state. */ |
103 | |
104 | class operands_scanner |
105 | { |
106 | public: |
107 | operands_scanner (struct function *fun, gimple *statement) |
108 | { |
109 | build_vuse = NULL_TREE; |
110 | build_vdef = NULL_TREE; |
111 | fn = fun; |
112 | stmt = statement; |
113 | } |
114 | |
115 | /* Create an operands cache for STMT. */ |
116 | void build_ssa_operands (); |
117 | |
118 | /* Verifies SSA statement operands. */ |
119 | DEBUG_FUNCTION bool verify_ssa_operands (); |
120 | |
121 | private: |
122 | /* Disable copy and assign of this class, as it may have problems with |
123 | build_uses vec. */ |
124 | DISABLE_COPY_AND_ASSIGN (operands_scanner); |
125 | |
126 | /* Array for building all the use operands. */ |
127 | auto_vec<tree *, 16> build_uses; |
128 | |
129 | /* The built VDEF operand. */ |
130 | tree build_vdef; |
131 | |
132 | /* The built VUSE operand. */ |
133 | tree build_vuse; |
134 | |
135 | /* Function which STMT belongs to. */ |
136 | struct function *fn; |
137 | |
138 | /* Statement to work on. */ |
139 | gimple *stmt; |
140 | |
141 | /* Takes elements from build_uses and turns them into use operands of STMT. */ |
142 | void finalize_ssa_uses (); |
143 | |
144 | /* Clear the in_list bits and empty the build array for VDEFs and |
145 | VUSEs. */ |
146 | void cleanup_build_arrays (); |
147 | |
148 | /* Finalize all the build vectors, fill the new ones into INFO. */ |
149 | void finalize_ssa_stmt_operands (); |
150 | |
151 | /* Start the process of building up operands vectors in INFO. */ |
152 | void start_ssa_stmt_operands (); |
153 | |
154 | /* Add USE_P to the list of pointers to operands. */ |
155 | void append_use (tree *use_p); |
156 | |
157 | /* Add VAR to the set of variables that require a VDEF operator. */ |
158 | void append_vdef (tree var); |
159 | |
160 | /* Add VAR to the set of variables that require a VUSE operator. */ |
161 | void append_vuse (tree var); |
162 | |
163 | /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */ |
164 | void add_virtual_operand (int flags); |
165 | |
166 | |
167 | /* Add *VAR_P to the appropriate operand array for statement STMT. |
168 | FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register, |
169 | it will be added to the statement's real operands, otherwise it is |
170 | added to virtual operands. */ |
171 | void add_stmt_operand (tree *var_p, int flags); |
172 | |
173 | /* A subroutine of get_expr_operands to handle MEM_REF. |
174 | |
175 | STMT is the statement being processed, EXPR is the MEM_REF |
176 | that got us here. |
177 | |
178 | FLAGS is as in get_expr_operands. */ |
179 | void get_mem_ref_operands (tree expr, int flags); |
180 | |
181 | /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */ |
182 | void get_tmr_operands (tree expr, int flags); |
183 | |
184 | |
185 | /* If STMT is a call that may clobber globals and other symbols that |
186 | escape, add them to the VDEF/VUSE lists for it. */ |
187 | void maybe_add_call_vops (gcall *stmt); |
188 | |
189 | /* Scan operands in the ASM_EXPR stmt referred to in INFO. */ |
190 | void get_asm_stmt_operands (gasm *stmt); |
191 | |
192 | |
193 | /* Recursively scan the expression pointed to by EXPR_P in statement |
194 | STMT. FLAGS is one of the OPF_* constants modifying how to |
195 | interpret the operands found. */ |
196 | void get_expr_operands (tree *expr_p, int flags); |
197 | |
198 | /* Parse STMT looking for operands. When finished, the various |
199 | build_* operand vectors will have potential operands in them. */ |
200 | void parse_ssa_operands (); |
201 | |
202 | |
203 | /* Takes elements from build_defs and turns them into def operands of STMT. |
204 | TODO -- Make build_defs vec of tree *. */ |
205 | void finalize_ssa_defs (); |
206 | }; |
207 | |
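/* An operands_scanner object is short-lived: one is constructed per
   statement by update_stmt_operands and by the external verify_ssa_operands
   wrapper near the end of this file, and it is discarded as soon as the
   statement's operand cache has been rebuilt or checked.  */
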
208 | /* Accessor to tree-ssa-operands.cc caches. */ |
209 | static inline struct ssa_operands * |
210 | gimple_ssa_operands (const struct function *fun) |
211 | { |
212 | return &fun->gimple_df->ssa_operands; |
213 | } |
214 | |
215 | |
216 | /* Return true if the SSA operands cache is active. */ |
217 | |
218 | bool |
219 | ssa_operands_active (struct function *fun) |
220 | { |
221 | if (fun == NULL) |
222 | return false; |
223 | |
224 | return fun->gimple_df && gimple_ssa_operands (fun)->ops_active; |
225 | } |
226 | |
227 | |
228 | /* Create the VOP variable, an artificial global variable to act as a |
229 | representative of all of the virtual operands FUD chain. */ |
230 | |
231 | static void |
232 | create_vop_var (struct function *fn) |
233 | { |
234 | tree global_var; |
235 | |
236 | gcc_assert (fn->gimple_df->vop == NULL_TREE); |
237 | |
238 | global_var = build_decl (BUILTINS_LOCATION, VAR_DECL, |
239 | get_identifier (".MEM"), |
240 | void_type_node); |
241 | DECL_ARTIFICIAL (global_var) = 1; |
242 | DECL_IGNORED_P (global_var) = 1; |
243 | TREE_READONLY (global_var) = 0; |
244 | DECL_EXTERNAL (global_var) = 1; |
245 | TREE_STATIC (global_var) = 1; |
246 | TREE_USED (global_var) = 1; |
247 | DECL_CONTEXT (global_var) = NULL_TREE; |
248 | TREE_THIS_VOLATILE (global_var) = 0; |
249 | TREE_ADDRESSABLE (global_var) = 0; |
250 | VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1; |
251 | |
252 | fn->gimple_df->vop = global_var; |
253 | } |
254 | |
255 | /* These are the sizes of the operand memory buffer in bytes which gets |
256 | allocated each time more operands space is required. The final value is |
257 | the amount that is allocated every time after that. |
258 | In 1k we can fit 25 use operands (or 63 def operands) on a host with |
259 | 8 byte pointers, that would be 10 statements each with 1 def and 2 |
260 | uses. */ |
261 | |
262 | #define OP_SIZE_INIT 0 |
263 | #define OP_SIZE_1 (1024 - sizeof (void *)) |
264 | #define OP_SIZE_2 (1024 * 4 - sizeof (void *)) |
265 | #define OP_SIZE_3 (1024 * 16 - sizeof (void *)) |
266 | |
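/* ssa_operand_alloc below steps through these sizes: the first chunk is
   OP_SIZE_1 (just under 1kb), the next OP_SIZE_2 (just under 4kb), and
   every chunk after that OP_SIZE_3 (just under 16kb).  The "- sizeof
   (void *)" leaves room for the chaining pointer of ssa_operand_memory_d,
   so each chunk plus its header fits a 1kb/4kb/16kb allocation.  */
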
267 | /* Initialize the operand cache routines. */ |
268 | |
269 | void |
270 | init_ssa_operands (struct function *fn) |
271 | { |
272 | gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL); |
273 | gimple_ssa_operands (fn)->operand_memory_index |
274 | = gimple_ssa_operands (fn)->ssa_operand_mem_size; |
275 | gimple_ssa_operands (fn)->ops_active = true; |
276 | gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT; |
277 | create_vop_var (fn); |
278 | } |
279 | |
280 | |
281 | /* Dispose of anything required by the operand routines. */ |
282 | |
283 | void |
284 | fini_ssa_operands (struct function *fn) |
285 | { |
286 | struct ssa_operand_memory_d *ptr; |
287 | |
288 | gimple_ssa_operands (fn)->free_uses = NULL; |
289 | |
290 | while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL) |
291 | { |
292 | gimple_ssa_operands (fn)->operand_memory |
293 | = gimple_ssa_operands (fn)->operand_memory->next; |
294 | ggc_free (ptr); |
295 | } |
296 | |
297 | gimple_ssa_operands (fun: fn)->ops_active = false; |
298 | |
299 | fn->gimple_df->vop = NULL_TREE; |
300 | } |
301 | |
302 | |
303 | /* Return memory for an operand of size SIZE. */ |
304 | |
305 | static inline void * |
306 | ssa_operand_alloc (struct function *fn, unsigned size) |
307 | { |
308 | char *ptr; |
309 | |
310 | gcc_assert (size == sizeof (struct use_optype_d)); |
311 | |
312 | if (gimple_ssa_operands (fn)->operand_memory_index + size |
313 | >= gimple_ssa_operands (fn)->ssa_operand_mem_size) |
314 | { |
315 | struct ssa_operand_memory_d *ptr; |
316 | |
317 | switch (gimple_ssa_operands (fn)->ssa_operand_mem_size) |
318 | { |
319 | case OP_SIZE_INIT: |
320 | gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1; |
321 | break; |
322 | case OP_SIZE_1: |
323 | gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2; |
324 | break; |
325 | case OP_SIZE_2: |
326 | case OP_SIZE_3: |
327 | gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3; |
328 | break; |
329 | default: |
330 | gcc_unreachable (); |
331 | } |
332 | |
333 | |
334 | ptr = (ssa_operand_memory_d *) ggc_internal_alloc |
335 | (sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size); |
336 | |
337 | ptr->next = gimple_ssa_operands (fn)->operand_memory; |
338 | gimple_ssa_operands (fn)->operand_memory = ptr; |
339 | gimple_ssa_operands (fn)->operand_memory_index = 0; |
340 | } |
341 | |
342 | ptr = &(gimple_ssa_operands (fn)->operand_memory |
343 | ->mem[gimple_ssa_operands (fn)->operand_memory_index]); |
344 | gimple_ssa_operands (fn)->operand_memory_index += size; |
345 | return ptr; |
346 | } |
347 | |
348 | |
349 | /* Allocate a USE operand. */ |
350 | |
351 | static inline struct use_optype_d * |
352 | alloc_use (struct function *fn) |
353 | { |
354 | struct use_optype_d *ret; |
355 | if (gimple_ssa_operands (fn)->free_uses) |
356 | { |
357 | ret = gimple_ssa_operands (fn)->free_uses; |
358 | gimple_ssa_operands (fn)->free_uses |
359 | = gimple_ssa_operands (fn)->free_uses->next; |
360 | } |
361 | else |
362 | ret = (struct use_optype_d *) |
363 | ssa_operand_alloc (fn, sizeof (struct use_optype_d)); |
364 | return ret; |
365 | } |
366 | |
367 | |
368 | /* Adds OP to the list of uses of statement STMT after LAST. */ |
369 | |
370 | static inline use_optype_p |
371 | add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last) |
372 | { |
373 | use_optype_p new_use; |
374 | |
375 | new_use = alloc_use (fn); |
376 | USE_OP_PTR (new_use)->use = op; |
377 | link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); |
378 | last->next = new_use; |
379 | new_use->next = NULL; |
380 | return new_use; |
381 | } |
382 | |
383 | |
384 | |
385 | /* Takes elements from build_defs and turns them into def operands of STMT. |
386 | TODO -- Make build_defs vec of tree *. */ |
387 | |
388 | inline void |
389 | operands_scanner::finalize_ssa_defs () |
390 | { |
391 | /* Pre-pend the vdef we may have built. */ |
392 | if (build_vdef != NULL_TREE) |
393 | { |
394 | tree oldvdef = gimple_vdef (stmt); |
395 | if (oldvdef |
396 | && TREE_CODE (oldvdef) == SSA_NAME) |
397 | oldvdef = SSA_NAME_VAR (oldvdef); |
398 | if (oldvdef != build_vdef) |
399 | gimple_set_vdef (stmt, build_vdef); |
400 | } |
401 | |
402 | /* Clear and unlink a no longer necessary VDEF. */ |
403 | if (build_vdef == NULL_TREE |
404 | && gimple_vdef (stmt) != NULL_TREE) |
405 | { |
406 | if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) |
407 | { |
408 | unlink_stmt_vdef (stmt); |
409 | release_ssa_name_fn (fn, gimple_vdef (stmt)); |
410 | } |
411 | gimple_set_vdef (stmt, NULL_TREE); |
412 | } |
413 | |
414 | /* If we have a non-SSA_NAME VDEF, mark it for renaming. */ |
415 | if (gimple_vdef (stmt) |
416 | && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME) |
417 | { |
418 | fn->gimple_df->rename_vops = 1; |
419 | fn->gimple_df->ssa_renaming_needed = 1; |
420 | } |
421 | } |
422 | |
423 | |
424 | /* Takes elements from build_uses and turns them into use operands of STMT. */ |
425 | |
426 | inline void |
427 | operands_scanner::finalize_ssa_uses () |
428 | { |
429 | unsigned new_i; |
430 | struct use_optype_d new_list; |
431 | use_optype_p old_ops, ptr, last; |
432 | |
433 | /* Pre-pend the VUSE we may have built. */ |
434 | if (build_vuse != NULL_TREE) |
435 | { |
436 | tree oldvuse = gimple_vuse (stmt); |
437 | if (oldvuse |
438 | && TREE_CODE (oldvuse) == SSA_NAME) |
439 | oldvuse = SSA_NAME_VAR (oldvuse); |
440 | if (oldvuse != (build_vuse != NULL_TREE |
441 | ? build_vuse : build_vdef)) |
442 | gimple_set_vuse (stmt, NULL_TREE); |
443 | build_uses.safe_insert (0, gimple_vuse_ptr (stmt)); |
444 | } |
445 | |
446 | new_list.next = NULL; |
447 | last = &new_list; |
448 | |
449 | old_ops = gimple_use_ops (stmt); |
450 | |
451 | /* Clear a no longer necessary VUSE. */ |
452 | if (build_vuse == NULL_TREE |
453 | && gimple_vuse (stmt) != NULL_TREE) |
454 | gimple_set_vuse (stmt, NULL_TREE); |
455 | |
456 | /* If there is anything in the old list, free it. */ |
457 | if (old_ops) |
458 | { |
459 | for (ptr = old_ops; ptr->next; ptr = ptr->next) |
460 | delink_imm_use (USE_OP_PTR (ptr)); |
461 | delink_imm_use (USE_OP_PTR (ptr)); |
462 | ptr->next = gimple_ssa_operands (fn)->free_uses; |
463 | gimple_ssa_operands (fn)->free_uses = old_ops; |
464 | } |
465 | |
466 | /* If we added a VUSE, make sure to set the operand if it is not already |
467 | present and mark it for renaming. */ |
468 | if (build_vuse != NULL_TREE |
469 | && gimple_vuse (stmt) == NULL_TREE) |
470 | { |
471 | gimple_set_vuse (stmt, gimple_vop (fn)); |
472 | fn->gimple_df->rename_vops = 1; |
473 | fn->gimple_df->ssa_renaming_needed = 1; |
474 | } |
475 | |
476 | /* Now create use nodes for all the new uses. */ |
477 | for (new_i = 0; new_i < build_uses.length (); new_i++) |
478 | { |
479 | tree *op = build_uses[new_i]; |
480 | last = add_use_op (fn, stmt, op, last); |
481 | } |
482 | |
483 | /* Now set the stmt's operands. */ |
484 | gimple_set_use_ops (stmt, new_list.next); |
485 | } |
486 | |
487 | |
488 | /* Clear the in_list bits and empty the build array for VDEFs and |
489 | VUSEs. */ |
490 | |
491 | inline void |
492 | operands_scanner::cleanup_build_arrays () |
493 | { |
494 | build_vdef = NULL_TREE; |
495 | build_vuse = NULL_TREE; |
496 | build_uses.truncate (0); |
497 | } |
498 | |
499 | |
500 | /* Finalize all the build vectors, fill the new ones into INFO. */ |
501 | |
502 | inline void |
503 | operands_scanner::finalize_ssa_stmt_operands () |
504 | { |
505 | finalize_ssa_defs (); |
506 | finalize_ssa_uses (); |
507 | cleanup_build_arrays (); |
508 | } |
509 | |
510 | |
511 | /* Start the process of building up operands vectors in INFO. */ |
512 | |
513 | inline void |
514 | operands_scanner::start_ssa_stmt_operands () |
515 | { |
516 | gcc_assert (build_uses.length () == 0); |
517 | gcc_assert (build_vuse == NULL_TREE); |
518 | gcc_assert (build_vdef == NULL_TREE); |
519 | } |
520 | |
521 | |
522 | /* Add USE_P to the list of pointers to operands. */ |
523 | |
524 | inline void |
525 | operands_scanner::append_use (tree *use_p) |
526 | { |
527 | build_uses.safe_push (use_p); |
528 | } |
529 | |
530 | |
531 | /* Add VAR to the set of variables that require a VDEF operator. */ |
532 | |
533 | inline void |
534 | operands_scanner::append_vdef (tree var) |
535 | { |
536 | gcc_assert ((build_vdef == NULL_TREE |
537 | || build_vdef == var) |
538 | && (build_vuse == NULL_TREE |
539 | || build_vuse == var)); |
540 | |
541 | build_vdef = var; |
542 | build_vuse = var; |
543 | } |
544 | |
545 | |
546 | /* Add VAR to the set of variables that require a VUSE operator. */ |
547 | |
548 | inline void |
549 | operands_scanner::append_vuse (tree var) |
550 | { |
551 | gcc_assert (build_vuse == NULL_TREE |
552 | || build_vuse == var); |
553 | |
554 | build_vuse = var; |
555 | } |
556 | |
557 | /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */ |
558 | |
559 | void |
560 | operands_scanner::add_virtual_operand (int flags) |
561 | { |
562 | /* Add virtual operands to the stmt, unless the caller has specifically |
563 | requested not to do that (used when adding operands inside an |
564 | ADDR_EXPR expression). */ |
565 | if (flags & opf_no_vops) |
566 | return; |
567 | |
568 | gcc_assert (!is_gimple_debug (stmt)); |
569 | |
570 | if (flags & opf_def) |
571 | append_vdef (gimple_vop (fn)); |
572 | else |
573 | append_vuse (gimple_vop (fn)); |
574 | } |
575 | |
576 | |
577 | /* Add *VAR_P to the appropriate operand array for statement STMT. |
578 | FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register, |
579 | it will be added to the statement's real operands, otherwise it is |
580 | added to virtual operands. */ |
581 | |
582 | void |
583 | operands_scanner::add_stmt_operand (tree *var_p, int flags) |
584 | { |
585 | tree var = *var_p; |
586 | |
587 | gcc_assert (SSA_VAR_P (*var_p) |
588 | || TREE_CODE (*var_p) == STRING_CST |
589 | || TREE_CODE (*var_p) == CONST_DECL); |
590 | |
591 | if (is_gimple_reg (var)) |
592 | { |
593 | /* The variable is a GIMPLE register. Add it to real operands. */ |
594 | if (flags & opf_def) |
595 | ; |
596 | else |
597 | append_use (var_p); |
598 | if (DECL_P (*var_p)) |
599 | fn->gimple_df->ssa_renaming_needed = 1; |
600 | } |
601 | else |
602 | { |
603 | /* Mark statements with volatile operands. */ |
604 | if (!(flags & opf_no_vops) |
605 | && TREE_THIS_VOLATILE (var)) |
606 | gimple_set_has_volatile_ops (stmt, true); |
607 | |
608 | /* The variable is a memory access. Add virtual operands. */ |
609 | add_virtual_operand (flags); |
610 | } |
611 | } |
612 | |
613 | /* Mark the base address of REF as having its address taken. |
614 | REF may be a single variable whose address has been taken or any |
615 | other valid GIMPLE memory reference (structure reference, array, |
616 | etc). */ |
617 | |
618 | static void |
619 | mark_address_taken (tree ref) |
620 | { |
621 | tree var; |
622 | |
623 | /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF |
624 | as the only thing we take the address of. If VAR is a structure, |
625 | taking the address of a field means that the whole structure may |
626 | be referenced using pointer arithmetic. See PR 21407 and the |
627 | ensuing mailing list discussion. */ |
628 | var = get_base_address (ref); |
629 | if (VAR_P (var) |
630 | || TREE_CODE (var) == RESULT_DECL |
631 | || TREE_CODE (var) == PARM_DECL) |
632 | TREE_ADDRESSABLE (var) = 1; |
633 | } |
634 | |
635 | |
636 | /* A subroutine of get_expr_operands to handle MEM_REF. |
637 | |
638 | STMT is the statement being processed, EXPR is the MEM_REF |
639 | that got us here. |
640 | |
641 | FLAGS is as in get_expr_operands. */ |
642 | |
643 | void |
644 | operands_scanner::get_mem_ref_operands (tree expr, int flags) |
645 | { |
646 | tree *pptr = &TREE_OPERAND (expr, 0); |
647 | |
648 | if (!(flags & opf_no_vops) |
649 | && TREE_THIS_VOLATILE (expr)) |
650 | gimple_set_has_volatile_ops (stmt, true); |
651 | |
652 | /* Add the VOP. */ |
653 | add_virtual_operand (flags); |
654 | |
655 | /* If requested, add a USE operand for the base pointer. */ |
656 | get_expr_operands (pptr, |
657 | opf_non_addressable | opf_use |
658 | | (flags & (opf_no_vops|opf_not_non_addressable))); |
659 | } |
660 | |
661 | |
662 | /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */ |
663 | |
664 | void |
665 | operands_scanner::get_tmr_operands (tree expr, int flags) |
666 | { |
667 | if (!(flags & opf_no_vops) |
668 | && TREE_THIS_VOLATILE (expr)) |
669 | gimple_set_has_volatile_ops (stmt, true); |
670 | |
671 | /* First record the real operands. */ |
672 | get_expr_operands (&TMR_BASE (expr), |
673 | opf_non_addressable | opf_use |
674 | | (flags & (opf_no_vops|opf_not_non_addressable))); |
675 | get_expr_operands (&TMR_INDEX (expr), opf_use | (flags & opf_no_vops)); |
676 | get_expr_operands (&TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops)); |
677 | |
678 | add_virtual_operand (flags); |
679 | } |
680 | |
681 | |
682 | /* If STMT is a call that may clobber globals and other symbols that |
683 | escape, add them to the VDEF/VUSE lists for it. */ |
684 | |
685 | void |
686 | operands_scanner::maybe_add_call_vops (gcall *stmt) |
687 | { |
688 | int call_flags = gimple_call_flags (stmt); |
689 | |
690 | /* If aliases have been computed already, add VDEF or VUSE |
691 | operands for all the symbols that have been found to be |
692 | call-clobbered. */ |
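/* Concretely: an ordinary call gets a VDEF (which, via append_vdef, implies
   a VUSE as well), a pure call gets only a VUSE, and a const or ECF_NOVOPS
   call gets no virtual operands at all.  */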
693 | if (!(call_flags & ECF_NOVOPS)) |
694 | { |
695 | /* A 'pure' or a 'const' function never call-clobbers anything. */ |
696 | if (!(call_flags & (ECF_PURE | ECF_CONST))) |
697 | add_virtual_operand (opf_def); |
698 | else if (!(call_flags & ECF_CONST)) |
699 | add_virtual_operand (opf_use); |
700 | } |
701 | } |
702 | |
703 | |
704 | /* Scan operands in the ASM_EXPR stmt referred to in INFO. */ |
705 | |
706 | void |
707 | operands_scanner::get_asm_stmt_operands (gasm *stmt) |
708 | { |
709 | size_t i, noutputs; |
710 | const char **oconstraints; |
711 | const char *constraint; |
712 | bool allows_mem, allows_reg, is_inout; |
713 | |
714 | noutputs = gimple_asm_noutputs (stmt); |
715 | oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); |
716 | |
717 | /* Gather all output operands. */ |
718 | for (i = 0; i < gimple_asm_noutputs (stmt); i++) |
719 | { |
720 | tree link = gimple_asm_output_op (stmt, i); |
721 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); |
722 | oconstraints[i] = constraint; |
723 | parse_output_constraint (&constraint, i, 0, 0, &allows_mem, |
724 | &allows_reg, &is_inout); |
725 | |
726 | /* This should have been split in gimplify_asm_expr. */ |
727 | gcc_assert (!allows_reg || !is_inout); |
728 | |
729 | /* Memory operands are addressable. Note that STMT needs the |
730 | address of this operand. */ |
731 | if (!allows_reg && allows_mem) |
732 | mark_address_taken (TREE_VALUE (link)); |
733 | |
734 | get_expr_operands (&TREE_VALUE (link), opf_def | opf_not_non_addressable); |
735 | } |
736 | |
737 | /* Gather all input operands. */ |
738 | for (i = 0; i < gimple_asm_ninputs (stmt); i++) |
739 | { |
740 | tree link = gimple_asm_input_op (stmt, i); |
741 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); |
742 | parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints, |
743 | &allows_mem, &allows_reg); |
744 | |
745 | /* Memory operands are addressable. Note that STMT needs the |
746 | address of this operand. */ |
747 | if (!allows_reg && allows_mem) |
748 | mark_address_taken (TREE_VALUE (link)); |
749 | |
750 | get_expr_operands (&TREE_VALUE (link), opf_not_non_addressable); |
751 | } |
752 | |
753 | /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */ |
754 | if (gimple_asm_clobbers_memory_p (stmt)) |
755 | add_virtual_operand (opf_def); |
756 | } |
757 | |
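/* For instance, for an asm like
     asm volatile ("" : "=m" (x) : "m" (y) : "memory");
   both x and y are marked addressable, x is scanned with opf_def (so a
   memory output yields a VDEF), y yields a VUSE, and the "memory" clobber
   adds a VDEF as well.  This is only an informal example; the operands
   actually created depend on the constraints parsed above.  */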
758 | |
759 | /* Recursively scan the expression pointed to by EXPR_P in statement |
760 | STMT. FLAGS is one of the OPF_* constants modifying how to |
761 | interpret the operands found. */ |
762 | |
763 | void |
764 | operands_scanner::get_expr_operands (tree *expr_p, int flags) |
765 | { |
766 | enum tree_code code; |
767 | enum tree_code_class codeclass; |
768 | tree expr = *expr_p; |
769 | int uflags = opf_use; |
770 | |
771 | if (expr == NULL) |
772 | return; |
773 | |
774 | if (is_gimple_debug (stmt)) |
775 | uflags |= (flags & opf_no_vops); |
776 | |
777 | code = TREE_CODE (expr); |
778 | codeclass = TREE_CODE_CLASS (code); |
779 | |
780 | switch (code) |
781 | { |
782 | case ADDR_EXPR: |
783 | /* Taking the address of a variable does not represent a |
784 | reference to it, but the fact that the statement takes its |
785 | address will be of interest to some passes (e.g. alias |
786 | resolution). */ |
787 | if ((!(flags & opf_non_addressable) |
788 | || (flags & opf_not_non_addressable)) |
789 | && !is_gimple_debug (stmt)) |
790 | mark_address_taken (TREE_OPERAND (expr, 0)); |
791 | |
792 | /* Otherwise, there may be variables referenced inside but there |
793 | should be no VUSEs created, since the referenced objects are |
794 | not really accessed. The only operands that we should find |
795 | here are ARRAY_REF indices which will always be real operands |
796 | (GIMPLE does not allow non-registers as array indices). */ |
797 | flags |= opf_no_vops; |
798 | get_expr_operands (&TREE_OPERAND (expr, 0), |
799 | flags | opf_not_non_addressable | opf_address_taken); |
800 | return; |
801 | |
802 | case SSA_NAME: |
803 | case VAR_DECL: |
804 | case PARM_DECL: |
805 | case RESULT_DECL: |
806 | case STRING_CST: |
807 | case CONST_DECL: |
808 | if (!(flags & opf_address_taken)) |
809 | add_stmt_operand (expr_p, flags); |
810 | return; |
811 | |
812 | case DEBUG_EXPR_DECL: |
813 | gcc_assert (gimple_debug_bind_p (stmt)); |
814 | return; |
815 | |
816 | case MEM_REF: |
817 | get_mem_ref_operands (expr, flags); |
818 | return; |
819 | |
820 | case TARGET_MEM_REF: |
821 | get_tmr_operands (expr, flags); |
822 | return; |
823 | |
824 | case ARRAY_REF: |
825 | case ARRAY_RANGE_REF: |
826 | case COMPONENT_REF: |
827 | case REALPART_EXPR: |
828 | case IMAGPART_EXPR: |
829 | { |
830 | if (!(flags & opf_no_vops) |
831 | && TREE_THIS_VOLATILE (expr)) |
832 | gimple_set_has_volatile_ops (stmt, true); |
833 | |
834 | get_expr_operands (&TREE_OPERAND (expr, 0), flags); |
835 | |
836 | if (code == COMPONENT_REF) |
837 | get_expr_operands (&TREE_OPERAND (expr, 2), uflags); |
838 | else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) |
839 | { |
840 | get_expr_operands (&TREE_OPERAND (expr, 1), uflags); |
841 | get_expr_operands (&TREE_OPERAND (expr, 2), uflags); |
842 | get_expr_operands (&TREE_OPERAND (expr, 3), uflags); |
843 | } |
844 | |
845 | return; |
846 | } |
847 | |
848 | case WITH_SIZE_EXPR: |
849 | /* WITH_SIZE_EXPR is a pass-through reference to its first argument, |
850 | and an rvalue reference to its second argument. */ |
851 | get_expr_operands (&TREE_OPERAND (expr, 1), uflags); |
852 | get_expr_operands (&TREE_OPERAND (expr, 0), flags); |
853 | return; |
854 | |
855 | case COND_EXPR: |
856 | case VEC_COND_EXPR: |
857 | case VEC_PERM_EXPR: |
858 | get_expr_operands (&TREE_OPERAND (expr, 0), uflags); |
859 | get_expr_operands (&TREE_OPERAND (expr, 1), uflags); |
860 | get_expr_operands (&TREE_OPERAND (expr, 2), uflags); |
861 | return; |
862 | |
863 | case CONSTRUCTOR: |
864 | { |
865 | /* General aggregate CONSTRUCTORs have been decomposed, but they |
866 | are still in use as the COMPLEX_EXPR equivalent for vectors. */ |
867 | constructor_elt *ce; |
868 | unsigned HOST_WIDE_INT idx; |
869 | |
870 | /* A volatile constructor is actually TREE_CLOBBER_P, transfer |
871 | the volatility to the statement, don't use TREE_CLOBBER_P for |
872 | mirroring the other uses of THIS_VOLATILE in this file. */ |
873 | if (!(flags & opf_no_vops) |
874 | && TREE_THIS_VOLATILE (expr)) |
875 | gimple_set_has_volatile_ops (stmt, true); |
876 | |
877 | for (idx = 0; |
878 | vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce); |
879 | idx++) |
880 | get_expr_operands (&ce->value, uflags); |
881 | |
882 | return; |
883 | } |
884 | |
885 | case BIT_FIELD_REF: |
886 | if (!(flags & opf_no_vops) |
887 | && TREE_THIS_VOLATILE (expr)) |
888 | gimple_set_has_volatile_ops (stmt, true); |
889 | /* FALLTHRU */ |
890 | |
891 | case VIEW_CONVERT_EXPR: |
892 | do_unary: |
893 | get_expr_operands (&TREE_OPERAND (expr, 0), flags); |
894 | return; |
895 | |
896 | case BIT_INSERT_EXPR: |
897 | case COMPOUND_EXPR: |
898 | case OBJ_TYPE_REF: |
899 | do_binary: |
900 | { |
901 | get_expr_operands (&TREE_OPERAND (expr, 0), flags); |
902 | get_expr_operands (&TREE_OPERAND (expr, 1), flags); |
903 | return; |
904 | } |
905 | |
906 | case DOT_PROD_EXPR: |
907 | case SAD_EXPR: |
908 | case REALIGN_LOAD_EXPR: |
909 | case WIDEN_MULT_PLUS_EXPR: |
910 | case WIDEN_MULT_MINUS_EXPR: |
911 | { |
912 | get_expr_operands (&TREE_OPERAND (expr, 0), flags); |
913 | get_expr_operands (&TREE_OPERAND (expr, 1), flags); |
914 | get_expr_operands (&TREE_OPERAND (expr, 2), flags); |
915 | return; |
916 | } |
917 | |
918 | case FUNCTION_DECL: |
919 | case LABEL_DECL: |
920 | case CASE_LABEL_EXPR: |
921 | /* Expressions that make no memory references. */ |
922 | return; |
923 | |
924 | default: |
925 | if (codeclass == tcc_unary) |
926 | goto do_unary; |
927 | if (codeclass == tcc_binary || codeclass == tcc_comparison) |
928 | goto do_binary; |
929 | if (codeclass == tcc_constant || codeclass == tcc_type) |
930 | return; |
931 | } |
932 | |
933 | /* If we get here, something has gone wrong. */ |
934 | if (flag_checking) |
935 | { |
936 | fprintf (stderr, "unhandled expression in get_expr_operands():\n"); |
937 | debug_tree (expr); |
938 | fputs ("\n", stderr); |
939 | gcc_unreachable (); |
940 | } |
941 | } |
942 | |
943 | |
944 | /* Parse STMT looking for operands. When finished, the various |
945 | build_* operand vectors will have potential operands in them. */ |
946 | |
947 | void |
948 | operands_scanner::parse_ssa_operands () |
949 | { |
950 | enum gimple_code code = gimple_code (stmt); |
951 | size_t i, n, start = 0; |
952 | |
953 | switch (code) |
954 | { |
955 | case GIMPLE_ASM: |
956 | get_asm_stmt_operands (as_a <gasm *> (stmt)); |
957 | break; |
958 | |
959 | case GIMPLE_TRANSACTION: |
960 | /* The start of a transaction is a memory barrier. */ |
961 | add_virtual_operand (opf_def | opf_use); |
962 | break; |
963 | |
964 | case GIMPLE_DEBUG: |
965 | if (gimple_debug_bind_p (stmt) |
966 | && gimple_debug_bind_has_value_p (stmt)) |
967 | get_expr_operands (gimple_debug_bind_get_value_ptr (stmt), |
968 | opf_use | opf_no_vops); |
969 | break; |
970 | |
971 | case GIMPLE_RETURN: |
972 | append_vuse (gimple_vop (fn)); |
973 | goto do_default; |
974 | |
975 | case GIMPLE_CALL: |
976 | /* Add call-clobbered operands, if needed. */ |
977 | maybe_add_call_vops (as_a <gcall *> (stmt)); |
978 | /* FALLTHRU */ |
979 | |
980 | case GIMPLE_ASSIGN: |
981 | get_expr_operands (gimple_op_ptr (stmt, 0), opf_def); |
982 | start = 1; |
983 | /* FALLTHRU */ |
984 | |
985 | default: |
986 | do_default: |
987 | n = gimple_num_ops (stmt); |
988 | for (i = start; i < n; i++) |
989 | get_expr_operands (gimple_op_ptr (stmt, i), opf_use); |
990 | break; |
991 | } |
992 | } |
993 | |
994 | |
995 | /* Create an operands cache for STMT. */ |
996 | |
997 | void |
998 | operands_scanner::build_ssa_operands () |
999 | { |
1000 | /* Initially assume that the statement has no volatile operands. */ |
1001 | gimple_set_has_volatile_ops (stmt, false); |
1002 | |
1003 | start_ssa_stmt_operands (); |
1004 | parse_ssa_operands (); |
1005 | finalize_ssa_stmt_operands (); |
1006 | } |
1007 | |
1008 | /* Verifies SSA statement operands. */ |
1009 | |
1010 | DEBUG_FUNCTION bool |
1011 | operands_scanner::verify_ssa_operands () |
1012 | { |
1013 | use_operand_p use_p; |
1014 | def_operand_p def_p; |
1015 | ssa_op_iter iter; |
1016 | unsigned i; |
1017 | tree def; |
1018 | bool volatile_p = gimple_has_volatile_ops (stmt); |
1019 | |
1020 | /* build_ssa_operands w/o finalizing them. */ |
1021 | gimple_set_has_volatile_ops (stmt, false); |
1022 | start_ssa_stmt_operands (); |
1023 | parse_ssa_operands (); |
1024 | |
1025 | /* Now verify the built operands are the same as present in STMT. */ |
1026 | def = gimple_vdef (stmt); |
1027 | if (def |
1028 | && TREE_CODE (def) == SSA_NAME) |
1029 | def = SSA_NAME_VAR (def); |
1030 | if (build_vdef != def) |
1031 | { |
1032 | error ("virtual definition of statement not up to date"); |
1033 | return true; |
1034 | } |
1035 | if (gimple_vdef (stmt) |
1036 | && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P |
1037 | || DEF_FROM_PTR (def_p) != gimple_vdef (stmt))) |
1038 | { |
1039 | error ("virtual def operand missing for statement"); |
1040 | return true; |
1041 | } |
1042 | |
1043 | tree use = gimple_vuse (stmt); |
1044 | if (use |
1045 | && TREE_CODE (use) == SSA_NAME) |
1046 | use = SSA_NAME_VAR (use); |
1047 | if (build_vuse != use) |
1048 | { |
1049 | error ("virtual use of statement not up to date"); |
1050 | return true; |
1051 | } |
1052 | if (gimple_vuse (stmt) |
1053 | && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P |
1054 | || USE_FROM_PTR (use_p) != gimple_vuse (stmt))) |
1055 | { |
1056 | error ("virtual use operand missing for statement"); |
1057 | return true; |
1058 | } |
1059 | |
1060 | FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE) |
1061 | { |
1062 | tree *op; |
1063 | FOR_EACH_VEC_ELT (build_uses, i, op) |
1064 | { |
1065 | if (use_p->use == op) |
1066 | { |
1067 | build_uses[i] = NULL; |
1068 | break; |
1069 | } |
1070 | } |
1071 | if (i == build_uses.length ()) |
1072 | { |
1073 | error ("excess use operand for statement"); |
1074 | debug_generic_expr (USE_FROM_PTR (use_p)); |
1075 | return true; |
1076 | } |
1077 | } |
1078 | |
1079 | tree *op; |
1080 | FOR_EACH_VEC_ELT (build_uses, i, op) |
1081 | if (op != NULL) |
1082 | { |
1083 | error ("use operand missing for statement"); |
1084 | debug_generic_expr (*op); |
1085 | return true; |
1086 | } |
1087 | |
1088 | if (gimple_has_volatile_ops (stmt) != volatile_p) |
1089 | { |
1090 | error ("statement volatile flag not up to date"); |
1091 | return true; |
1092 | } |
1093 | |
1094 | cleanup_build_arrays (); |
1095 | return false; |
1096 | } |
1097 | |
1098 | /* Interface for external use. */ |
1099 | |
1100 | DEBUG_FUNCTION bool |
1101 | verify_ssa_operands (struct function *fn, gimple *stmt) |
1102 | { |
1103 | return operands_scanner (fn, stmt).verify_ssa_operands (); |
1104 | } |
1105 | |
1106 | |
1107 | /* Releases the operands of STMT back to their freelists, and clears |
1108 | the stmt operand lists. */ |
1109 | |
1110 | void |
1111 | free_stmt_operands (struct function *fn, gimple *stmt) |
1112 | { |
1113 | use_optype_p uses = gimple_use_ops (stmt), last_use; |
1114 | |
1115 | if (uses) |
1116 | { |
1117 | for (last_use = uses; last_use->next; last_use = last_use->next) |
1118 | delink_imm_use (USE_OP_PTR (last_use)); |
1119 | delink_imm_use (USE_OP_PTR (last_use)); |
1120 | last_use->next = gimple_ssa_operands (fn)->free_uses; |
1121 | gimple_ssa_operands (fn)->free_uses = uses; |
1122 | gimple_set_use_ops (stmt, NULL); |
1123 | } |
1124 | |
1125 | if (gimple_has_mem_ops (stmt)) |
1126 | { |
1127 | gimple_set_vuse (stmt, NULL_TREE); |
1128 | gimple_set_vdef (stmt, NULL_TREE); |
1129 | } |
1130 | } |
1131 | |
1132 | |
1133 | /* Get the operands of statement STMT. */ |
1134 | |
1135 | void |
1136 | update_stmt_operands (struct function *fn, gimple *stmt) |
1137 | { |
1138 | /* If update_stmt_operands is called before SSA is initialized, do |
1139 | nothing. */ |
1140 | if (!ssa_operands_active (fn)) |
1141 | return; |
1142 | |
1143 | timevar_push (TV_TREE_OPS); |
1144 | |
1145 | gcc_assert (gimple_modified_p (stmt)); |
1146 | operands_scanner (fn, stmt).build_ssa_operands (); |
1147 | gimple_set_modified (stmt, false); |
1148 | |
1149 | timevar_pop (TV_TREE_OPS); |
1150 | } |
1151 | |
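/* Passes rarely call this directly; they normally go through the
   update_stmt () / update_stmt_if_modified () wrappers, which set the
   modified flag on the statement and then call into here for the current
   function.  */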
1152 | |
1153 | /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made |
1154 | to check the validity of the swap operation. */ |
1155 | |
1156 | void |
1157 | swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1) |
1158 | { |
1159 | tree op0, op1; |
1160 | op0 = *exp0; |
1161 | op1 = *exp1; |
1162 | |
1163 | if (op0 != op1) |
1164 | { |
1165 | /* Attempt to preserve the relative positions of these two operands in |
1166 | their respective immediate use lists by adjusting their use pointer |
1167 | to point to the new operand position. */ |
1168 | use_optype_p use0, use1, ptr; |
1169 | use0 = use1 = NULL; |
1170 | |
1171 | /* Find the 2 operands in the cache, if they are there. */ |
1172 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
1173 | if (USE_OP_PTR (ptr)->use == exp0) |
1174 | { |
1175 | use0 = ptr; |
1176 | break; |
1177 | } |
1178 | |
1179 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
1180 | if (USE_OP_PTR (ptr)->use == exp1) |
1181 | { |
1182 | use1 = ptr; |
1183 | break; |
1184 | } |
1185 | |
1186 | /* And adjust their location to point to the new position of the |
1187 | operand. */ |
1188 | if (use0) |
1189 | USE_OP_PTR (use0)->use = exp1; |
1190 | if (use1) |
1191 | USE_OP_PTR (use1)->use = exp0; |
1192 | |
1193 | /* Now swap the data. */ |
1194 | *exp0 = op1; |
1195 | *exp1 = op0; |
1196 | } |
1197 | } |
1198 | |
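/* A typical caller is a pass canonicalizing a commutative operation, e.g.
   something along the lines of

       swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
                          gimple_assign_rhs2_ptr (stmt));

   which exchanges the two RHS operands in place while keeping the
   immediate-use links of the operand cache consistent.  */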
1199 | |
1200 | /* Scan the immediate_use list for VAR making sure it is linked properly. |
1201 | Return TRUE if there is a problem and emit an error message to F. */ |
1202 | |
1203 | DEBUG_FUNCTION bool |
1204 | verify_imm_links (FILE *f, tree var) |
1205 | { |
1206 | use_operand_p ptr, prev, list; |
1207 | unsigned int count; |
1208 | |
1209 | gcc_assert (TREE_CODE (var) == SSA_NAME); |
1210 | |
1211 | list = &(SSA_NAME_IMM_USE_NODE (var)); |
1212 | gcc_assert (list->use == NULL); |
1213 | |
1214 | if (list->prev == NULL) |
1215 | { |
1216 | gcc_assert (list->next == NULL); |
1217 | return false; |
1218 | } |
1219 | |
1220 | prev = list; |
1221 | count = 0; |
1222 | for (ptr = list->next; ptr != list; ) |
1223 | { |
1224 | if (prev != ptr->prev) |
1225 | { |
1226 | fprintf (f, "prev != ptr->prev\n"); |
1227 | goto error; |
1228 | } |
1229 | |
1230 | if (ptr->use == NULL) |
1231 | { |
1232 | fprintf (f, "ptr->use == NULL\n"); |
1233 | goto error; /* 2 roots, or SAFE guard node. */ |
1234 | } |
1235 | else if (*(ptr->use) != var) |
1236 | { |
1237 | fprintf (f, "*(ptr->use) != var\n"); |
1238 | goto error; |
1239 | } |
1240 | |
1241 | prev = ptr; |
1242 | ptr = ptr->next; |
1243 | |
1244 | count++; |
1245 | if (count == 0) |
1246 | { |
1247 | fprintf (f, "number of immediate uses doesn't fit unsigned int\n"); |
1248 | goto error; |
1249 | } |
1250 | } |
1251 | |
1252 | /* Verify list in the other direction. */ |
1253 | prev = list; |
1254 | for (ptr = list->prev; ptr != list; ) |
1255 | { |
1256 | if (prev != ptr->next) |
1257 | { |
1258 | fprintf (f, "prev != ptr->next\n"); |
1259 | goto error; |
1260 | } |
1261 | prev = ptr; |
1262 | ptr = ptr->prev; |
1263 | if (count == 0) |
1264 | { |
1265 | fprintf (f, "count-- < 0\n"); |
1266 | goto error; |
1267 | } |
1268 | count--; |
1269 | } |
1270 | |
1271 | if (count != 0) |
1272 | { |
1273 | fprintf (f, "count != 0\n"); |
1274 | goto error; |
1275 | } |
1276 | |
1277 | return false; |
1278 | |
1279 | error: |
1280 | if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt)) |
1281 | { |
1282 | fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt); |
1283 | print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM); |
1284 | } |
1285 | fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, |
1286 | (void *)ptr->use); |
1287 | print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM); |
1288 | fprintf (f, "\n"); |
1289 | return true; |
1290 | } |
1291 | |
1292 | |
1293 | /* Dump all the immediate uses to FILE. */ |
1294 | |
1295 | void |
1296 | dump_immediate_uses_for (FILE *file, tree var) |
1297 | { |
1298 | imm_use_iterator iter; |
1299 | use_operand_p use_p; |
1300 | |
1301 | gcc_assert (var && TREE_CODE (var) == SSA_NAME); |
1302 | |
1303 | print_generic_expr (file, var, TDF_SLIM); |
1304 | fprintf (file, " : -->"); |
1305 | if (has_zero_uses (var)) |
1306 | fprintf (file, " no uses.\n"); |
1307 | else |
1308 | if (has_single_use (var)) |
1309 | fprintf (file, " single use.\n"); |
1310 | else |
1311 | fprintf (file, "%d uses.\n", num_imm_uses (var)); |
1312 | |
1313 | FOR_EACH_IMM_USE_FAST (use_p, iter, var) |
1314 | { |
1315 | if (use_p->loc.stmt == NULL && use_p->use == NULL) |
1316 | fprintf (file, "***end of stmt iterator marker***\n"); |
1317 | else |
1318 | if (!is_gimple_reg (USE_FROM_PTR (use_p))) |
1319 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); |
1320 | else |
1321 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); |
1322 | } |
1323 | fprintf (file, "\n"); |
1324 | } |
1325 | |
1326 | |
1327 | /* Dump all the immediate uses to FILE. */ |
1328 | |
1329 | void |
1330 | dump_immediate_uses (FILE *file) |
1331 | { |
1332 | tree var; |
1333 | unsigned int x; |
1334 | |
1335 | fprintf (file, "Immediate_uses: \n\n"); |
1336 | FOR_EACH_SSA_NAME (x, var, cfun) |
1337 | { |
1338 | dump_immediate_uses_for (file, var); |
1339 | } |
1340 | } |
1341 | |
1342 | |
1343 | /* Dump def-use edges on stderr. */ |
1344 | |
1345 | DEBUG_FUNCTION void |
1346 | debug_immediate_uses (void) |
1347 | { |
1348 | dump_immediate_uses (stderr); |
1349 | } |
1350 | |
1351 | |
1352 | /* Dump def-use edges on stderr. */ |
1353 | |
1354 | DEBUG_FUNCTION void |
1355 | debug_immediate_uses_for (tree var) |
1356 | { |
1357 | dump_immediate_uses_for (stderr, var); |
1358 | } |
1359 | |
1360 | |
1361 | /* Unlink STMT's virtual definition from the IL by propagating its use. */ |
1362 | |
1363 | void |
1364 | unlink_stmt_vdef (gimple *stmt) |
1365 | { |
1366 | use_operand_p use_p; |
1367 | imm_use_iterator iter; |
1368 | gimple *use_stmt; |
1369 | tree vdef = gimple_vdef (stmt); |
1370 | tree vuse = gimple_vuse (stmt); |
1371 | |
1372 | if (!vdef |
1373 | || TREE_CODE (vdef) != SSA_NAME) |
1374 | return; |
1375 | |
1376 | FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef) |
1377 | { |
1378 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) |
1379 | SET_USE (use_p, vuse); |
1380 | } |
1381 | |
1382 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)) |
1383 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1; |
1384 | } |
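
/* A common use is when deleting a dead store; the caller typically does
   something like

       unlink_stmt_vdef (stmt);
       gsi_remove (&gsi, true);
       release_defs (stmt);

   so that consumers of the removed VDEF are rewired to the statement's VUSE
   before the statement itself goes away.  (Sketch only; gsi here is a
   gimple_stmt_iterator positioned at STMT.)  */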
1385 | |
1386 | /* Return true if the var whose chain of uses starts at PTR has a |
1387 | single nondebug use. Set USE_P and STMT to that single nondebug |
1388 | use, if so, or to NULL otherwise. */ |
1389 | bool |
1390 | single_imm_use_1 (const ssa_use_operand_t *head, |
1391 | use_operand_p *use_p, gimple **stmt) |
1392 | { |
1393 | ssa_use_operand_t *ptr, *single_use = 0; |
1394 | |
1395 | for (ptr = head->next; ptr != head; ptr = ptr->next) |
1396 | if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr))) |
1397 | { |
1398 | if (single_use) |
1399 | { |
1400 | single_use = NULL; |
1401 | break; |
1402 | } |
1403 | single_use = ptr; |
1404 | } |
1405 | |
1406 | if (use_p) |
1407 | *use_p = single_use; |
1408 | |
1409 | if (stmt) |
1410 | *stmt = single_use ? single_use->loc.stmt : NULL; |
1411 | |
1412 | return single_use; |
1413 | } |
1414 | |
1415 | |