1 | /* Header file for SSA iterators. |
2 | Copyright (C) 2013-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #ifndef GCC_SSA_ITERATORS_H |
21 | #define GCC_SSA_ITERATORS_H |
22 | |
23 | /* Immediate use lists are used to directly access all uses for an SSA |
24 | name and get pointers to the statement for each use. |
25 | |
26 | The structure ssa_use_operand_t consists of PREV and NEXT pointers |
27 | to maintain the list. A USE pointer, which points to address where |
28 | the use is located and a LOC pointer which can point to the |
29 | statement where the use is located, or, in the case of the root |
30 | node, it points to the SSA name itself. |
31 | |
32 | The list is anchored by an occurrence of ssa_operand_d *in* the |
   ssa_name node itself (named 'imm_uses').  This node is uniquely
   identified by having a NULL USE pointer and the LOC pointer
   pointing back to the ssa_name node itself.  This node forms the
36 | base for a circular list, and initially this is the only node in |
37 | the list. |
38 | |
39 | Fast iteration allows each use to be examined, but does not allow |
40 | any modifications to the uses or stmts. |
41 | |
   Normal iteration allows insertion, deletion, and modification.  The
   iterator manages this by inserting a marker node into the list
   immediately before the node currently being examined in the list.
   This marker node is uniquely identified by having a NULL stmt *and* a
   NULL use pointer.

   When iterating to the next use, the iteration routines check to see
   if the node after the marker has changed.  If it has, then the node
   following the marker is now the next one to be visited.  If not, the
   marker node is moved past that node in the list (visualize it as
   bumping the marker node through the list).  This continues until
   the marker node is moved to the original anchor position.  The
   marker node is then removed from the list.
55 | |
56 | If iteration is halted early, the marker node must be removed from |
57 | the list before continuing. */ |
struct imm_use_iterator
{
  /* This is the current use the iterator is processing.  */
  ssa_use_operand_t *imm_use;
  /* This marks the last use in the list (the root use node from the
     SSA_NAME itself).  */
  ssa_use_operand_t *end_p;
  /* This node is inserted and used to mark the end of the uses for a stmt.  */
  ssa_use_operand_t iter_node;
  /* This is the next ssa_name use to visit.  IMM_USE may get removed
     before the next one is traversed to, so it must be cached early.  */
  ssa_use_operand_t *next_imm_name;
};
70 | |
71 | |
/* Use this iterator when simply looking at stmts.  Adding, deleting or
   modifying stmts will cause this iterator to malfunction.  DEST
   receives each use as a use_operand_p in turn.  */

#define FOR_EACH_IMM_USE_FAST(DEST, ITER, SSAVAR)		\
  for ((DEST) = first_readonly_imm_use (&(ITER), (SSAVAR));	\
       !end_readonly_imm_use_p (&(ITER));			\
       (void) ((DEST) = next_readonly_imm_use (&(ITER))))
79 | |
80 | /* Forward declare for use in the class below. */ |
81 | inline void end_imm_use_stmt_traverse (imm_use_iterator *); |
82 | |
/* Arrange to automatically call, upon destruction, end_imm_use_stmt_traverse
   with a given pointer to imm_use_iterator.  This guarantees the marker
   node is removed from the list however the enclosing scope is exited.  */
struct auto_end_imm_use_stmt_traverse
{
  /* The iterator whose traversal is finished when this object dies.  */
  imm_use_iterator *imm;
  auto_end_imm_use_stmt_traverse (imm_use_iterator *imm)
    : imm (imm) {}
  ~auto_end_imm_use_stmt_traverse ()
  { end_imm_use_stmt_traverse (imm); }
};
93 | |
/* Use this iterator to visit each stmt which has a use of SSAVAR.  The
   destructor of the auto_end_imm_use_stmt_traverse object declared in the
   for-init deals with removing ITER from SSAVAR's IMM_USE list even when
   leaving the scope early (break, return, goto, exception).  */

#define FOR_EACH_IMM_USE_STMT(STMT, ITER, SSAVAR)		\
  for (struct auto_end_imm_use_stmt_traverse			\
	 auto_end_imm_use_stmt_traverse				\
	   ((((STMT) = first_imm_use_stmt (&(ITER), (SSAVAR))),	\
	     &(ITER)));						\
       !end_imm_use_stmt_p (&(ITER));				\
       (void) ((STMT) = next_imm_use_stmt (&(ITER))))
105 | |
/* Use this iterator in combination with FOR_EACH_IMM_USE_STMT to
   get access to each occurrence of ssavar on the stmt returned by
   that iterator.  For instance:

     FOR_EACH_IMM_USE_STMT (stmt, iter, ssavar)
       {
	 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	   {
	     SET_USE (use_p, blah);
	   }
	 update_stmt (stmt);
       }  */

#define FOR_EACH_IMM_USE_ON_STMT(DEST, ITER)			\
  for ((DEST) = first_imm_use_on_stmt (&(ITER));		\
       !end_imm_use_on_stmt_p (&(ITER));			\
       (void) ((DEST) = next_imm_use_on_stmt (&(ITER))))
123 | |
124 | |
125 | |
126 | extern bool single_imm_use_1 (const ssa_use_operand_t *head, |
127 | use_operand_p *use_p, gimple **stmt); |
128 | |
129 | |
/* Kind of value an ssa_op_iter is configured to return.  */
enum ssa_op_iter_type {
  ssa_op_iter_none = 0,		/* Iterator not in use.  */
  ssa_op_iter_tree,		/* Operands returned as plain trees.  */
  ssa_op_iter_use,		/* Operands returned as use_operand_p.  */
  ssa_op_iter_def		/* Operands returned as def_operand_p.  */
};
136 | |
/* This structure is used in the operand iterator loops.  It contains the
   items required to determine which operand is retrieved next.  During
   optimization, this structure is scalarized, and any unused fields are
   optimized away, resulting in little overhead.  */

struct ssa_op_iter
{
  enum ssa_op_iter_type iter_type;	/* Kind of value being returned.  */
  bool done;				/* True once all operands visited.  */
  int flags;				/* SSA_OP_* flags left to process.  */
  unsigned i;				/* Next def index / PHI argument.  */
  unsigned numops;			/* Number of defs / PHI arguments.  */
  use_optype_p uses;			/* Remaining use-operand chain.  */
  gimple *stmt;				/* Statement being iterated over.  */
};
152 | |
/* NOTE: Keep these in sync with doc/tree-ssa.texi.  */
/* These flags are used to determine which operands are returned during
   execution of the loop.  They may be combined with bitwise OR.  */
#define SSA_OP_USE 0x01		/* Real USE operands.  */
#define SSA_OP_DEF 0x02		/* Real DEF operands.  */
#define SSA_OP_VUSE 0x04	/* VUSE operands.  */
#define SSA_OP_VDEF 0x08	/* VDEF operands.  */
/* These are commonly grouped operand flags.  */
#define SSA_OP_VIRTUAL_USES	(SSA_OP_VUSE)
#define SSA_OP_VIRTUAL_DEFS	(SSA_OP_VDEF)
#define SSA_OP_ALL_VIRTUALS     (SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_DEFS)
#define SSA_OP_ALL_USES		(SSA_OP_VIRTUAL_USES | SSA_OP_USE)
#define SSA_OP_ALL_DEFS		(SSA_OP_VIRTUAL_DEFS | SSA_OP_DEF)
#define SSA_OP_ALL_OPERANDS	(SSA_OP_ALL_USES | SSA_OP_ALL_DEFS)
167 | |
/* This macro executes a loop over the operands of STMT specified in FLAGS,
   returning each operand as a 'tree' in the variable TREEVAR.  ITER is an
   ssa_op_iter structure used to control the loop.  */
#define FOR_EACH_SSA_TREE_OPERAND(TREEVAR, STMT, ITER, FLAGS)	\
  for (TREEVAR = op_iter_init_tree (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       (void) (TREEVAR = op_iter_next_tree (&(ITER))))

/* This macro executes a loop over the operands of STMT specified in FLAGS,
   returning each operand as a 'use_operand_p' in the variable USEVAR.
   ITER is an ssa_op_iter structure used to control the loop.  */
#define FOR_EACH_SSA_USE_OPERAND(USEVAR, STMT, ITER, FLAGS)	\
  for (USEVAR = op_iter_init_use (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       USEVAR = op_iter_next_use (&(ITER)))

/* This macro executes a loop over the operands of STMT specified in FLAGS,
   returning each operand as a 'def_operand_p' in the variable DEFVAR.
   ITER is an ssa_op_iter structure used to control the loop.  */
#define FOR_EACH_SSA_DEF_OPERAND(DEFVAR, STMT, ITER, FLAGS)	\
  for (DEFVAR = op_iter_init_def (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       DEFVAR = op_iter_next_def (&(ITER)))

/* This macro will execute a loop over all the arguments of a PHI which
   match FLAGS.  A use_operand_p is always returned via USEVAR.  FLAGS
   can be either SSA_OP_USE or SSA_OP_VIRTUAL_USES or SSA_OP_ALL_USES.  */
#define FOR_EACH_PHI_ARG(USEVAR, STMT, ITER, FLAGS)		\
  for ((USEVAR) = op_iter_init_phiuse (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       (USEVAR) = op_iter_next_use (&(ITER)))
199 | |
200 | |
/* This macro will execute a loop over a stmt, regardless of whether it is
   a real stmt or a PHI node, looking at the USE nodes matching FLAGS.
   The for-init expression dispatches to the PHI-specific initializer
   when STMT is a GIMPLE_PHI.  */
#define FOR_EACH_PHI_OR_STMT_USE(USEVAR, STMT, ITER, FLAGS)	\
  for ((USEVAR) = (gimple_code (STMT) == GIMPLE_PHI		\
		   ? op_iter_init_phiuse (&(ITER),		\
					  as_a <gphi *> (STMT), \
					  FLAGS)		\
		   : op_iter_init_use (&(ITER), STMT, FLAGS));	\
       !op_iter_done (&(ITER));					\
       (USEVAR) = op_iter_next_use (&(ITER)))

/* This macro will execute a loop over a stmt, regardless of whether it is
   a real stmt or a PHI node, looking at the DEF nodes matching FLAGS.  */
#define FOR_EACH_PHI_OR_STMT_DEF(DEFVAR, STMT, ITER, FLAGS)	\
  for ((DEFVAR) = (gimple_code (STMT) == GIMPLE_PHI		\
		   ? op_iter_init_phidef (&(ITER),		\
					  as_a <gphi *> (STMT),	\
					  FLAGS)		\
		   : op_iter_init_def (&(ITER), STMT, FLAGS));	\
       !op_iter_done (&(ITER));					\
       (DEFVAR) = op_iter_next_def (&(ITER)))
222 | |
/* The following macros are thin wrappers around the corresponding
   inline functions defined later in this file.  */

/* This macro returns an operand in STMT as a tree if it is the ONLY
   operand matching FLAGS.  If there are 0 or more than 1 operand matching
   FLAGS, then NULL_TREE is returned.  */
#define SINGLE_SSA_TREE_OPERAND(STMT, FLAGS)			\
  single_ssa_tree_operand (STMT, FLAGS)

/* This macro returns an operand in STMT as a use_operand_p if it is the ONLY
   operand matching FLAGS.  If there are 0 or more than 1 operand matching
   FLAGS, then NULL_USE_OPERAND_P is returned.  */
#define SINGLE_SSA_USE_OPERAND(STMT, FLAGS)			\
  single_ssa_use_operand (STMT, FLAGS)

/* This macro returns an operand in STMT as a def_operand_p if it is the ONLY
   operand matching FLAGS.  If there are 0 or more than 1 operand matching
   FLAGS, then NULL_DEF_OPERAND_P is returned.  */
#define SINGLE_SSA_DEF_OPERAND(STMT, FLAGS)			\
  single_ssa_def_operand (STMT, FLAGS)

/* This macro returns TRUE if there are no operands matching FLAGS in STMT.  */
#define ZERO_SSA_OPERANDS(STMT, FLAGS) zero_ssa_operands (STMT, FLAGS)

/* This macro counts the number of operands in STMT matching FLAGS.  */
#define NUM_SSA_OPERANDS(STMT, FLAGS) num_ssa_operands (STMT, FLAGS)
246 | |
247 | |
248 | /* Delink an immediate_uses node from its chain. */ |
249 | inline void |
250 | delink_imm_use (ssa_use_operand_t *linknode) |
251 | { |
252 | /* Return if this node is not in a list. */ |
253 | if (linknode->prev == NULL) |
254 | return; |
255 | |
256 | linknode->prev->next = linknode->next; |
257 | linknode->next->prev = linknode->prev; |
258 | linknode->prev = NULL; |
259 | linknode->next = NULL; |
260 | } |
261 | |
262 | /* Link ssa_imm_use node LINKNODE into the chain for LIST. */ |
263 | inline void |
264 | link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list) |
265 | { |
266 | /* Link the new node at the head of the list. If we are in the process of |
267 | traversing the list, we won't visit any new nodes added to it. */ |
268 | linknode->prev = list; |
269 | linknode->next = list->next; |
270 | list->next->prev = linknode; |
271 | list->next = linknode; |
272 | } |
273 | |
274 | /* Link ssa_imm_use node LINKNODE into the chain for DEF. */ |
275 | inline void |
276 | link_imm_use (ssa_use_operand_t *linknode, tree def) |
277 | { |
278 | ssa_use_operand_t *root; |
279 | |
280 | if (!def || TREE_CODE (def) != SSA_NAME) |
281 | linknode->prev = NULL; |
282 | else |
283 | { |
284 | root = &(SSA_NAME_IMM_USE_NODE (def)); |
285 | if (linknode->use) |
286 | gcc_checking_assert (*(linknode->use) == def); |
287 | link_imm_use_to_list (linknode, list: root); |
288 | } |
289 | } |
290 | |
291 | /* Set the value of a use pointed to by USE to VAL. */ |
292 | inline void |
293 | set_ssa_use_from_ptr (use_operand_p use, tree val) |
294 | { |
295 | delink_imm_use (linknode: use); |
296 | *(use->use) = val; |
297 | link_imm_use (linknode: use, def: val); |
298 | } |
299 | |
300 | /* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring |
301 | in STMT. */ |
302 | inline void |
303 | link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple *stmt) |
304 | { |
305 | if (stmt) |
306 | link_imm_use (linknode, def); |
307 | else |
308 | link_imm_use (linknode, NULL); |
309 | linknode->loc.stmt = stmt; |
310 | } |
311 | |
312 | /* Relink a new node in place of an old node in the list. */ |
313 | inline void |
314 | relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old) |
315 | { |
316 | /* The node one had better be in the same list. */ |
317 | gcc_checking_assert (*(old->use) == *(node->use)); |
318 | node->prev = old->prev; |
319 | node->next = old->next; |
320 | if (old->prev) |
321 | { |
322 | old->prev->next = node; |
323 | old->next->prev = node; |
324 | /* Remove the old node from the list. */ |
325 | old->prev = NULL; |
326 | } |
327 | } |
328 | |
329 | /* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring |
330 | in STMT. */ |
331 | inline void |
332 | relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old, |
333 | gimple *stmt) |
334 | { |
335 | if (stmt) |
336 | relink_imm_use (node: linknode, old); |
337 | else |
338 | link_imm_use (linknode, NULL); |
339 | linknode->loc.stmt = stmt; |
340 | } |
341 | |
342 | |
/* Return true if IMM has reached the end of the immediate use list,
   i.e. is back at the anchor node in the SSA_NAME.  */
inline bool
end_readonly_imm_use_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}
349 | |
/* Initialize iterator IMM to process the immediate-use list for VAR.
   Return the first use, or NULL_USE_OPERAND_P if the list is empty.  */
inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  /* Cache the following node so next_readonly_imm_use can verify the
     list has not been modified behind the iterator's back.  */
  imm->iter_node.next = imm->imm_use->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
361 | |
/* Bump IMM to the next use in the list.  Return NULL_USE_OPERAND_P once
   the anchor node is reached again.  */
inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  if (flag_checking)
    {
      gcc_assert (imm->iter_node.next == old->next);
      /* Re-cache for the next bump's consistency check.  */
      imm->iter_node.next = old->next->next;
    }

  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
383 | |
384 | |
385 | /* Return true if VAR has no nondebug uses. */ |
386 | inline bool |
387 | has_zero_uses (const_tree var) |
388 | { |
389 | const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (var)); |
390 | const ssa_use_operand_t *ptr; |
391 | |
392 | for (ptr = head->next; ptr != head; ptr = ptr->next) |
393 | if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr))) |
394 | return false; |
395 | |
396 | return true; |
397 | } |
398 | |
399 | /* Return true if VAR has a single nondebug use. */ |
400 | inline bool |
401 | has_single_use (const_tree var) |
402 | { |
403 | const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (var)); |
404 | const ssa_use_operand_t *ptr; |
405 | bool single = false; |
406 | |
407 | for (ptr = head->next; ptr != head; ptr = ptr->next) |
408 | if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr))) |
409 | { |
410 | if (single) |
411 | return false; |
412 | else |
413 | single = true; |
414 | } |
415 | |
416 | return single; |
417 | } |
418 | |
419 | /* If VAR has only a single immediate nondebug use, return true, and |
420 | set USE_P and STMT to the use pointer and stmt of occurrence. */ |
421 | inline bool |
422 | single_imm_use (const_tree var, use_operand_p *use_p, gimple **stmt) |
423 | { |
424 | const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var)); |
425 | |
426 | /* If there aren't any uses whatsoever, we're done. */ |
427 | if (ptr == ptr->next) |
428 | { |
429 | return_false: |
430 | *use_p = NULL_USE_OPERAND_P; |
431 | *stmt = NULL; |
432 | return false; |
433 | } |
434 | |
435 | /* If there's a single use, check that it's not a debug stmt. */ |
436 | if (ptr == ptr->next->next) |
437 | { |
438 | if (USE_STMT (ptr->next) && !is_gimple_debug (USE_STMT (ptr->next))) |
439 | { |
440 | *use_p = ptr->next; |
441 | *stmt = ptr->next->loc.stmt; |
442 | return true; |
443 | } |
444 | else |
445 | goto return_false; |
446 | } |
447 | |
448 | return single_imm_use_1 (head: ptr, use_p, stmt); |
449 | } |
450 | |
451 | /* Return the number of nondebug immediate uses of VAR. */ |
452 | inline unsigned int |
453 | num_imm_uses (const_tree var) |
454 | { |
455 | const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var)); |
456 | const ssa_use_operand_t *ptr; |
457 | unsigned int num = 0; |
458 | |
459 | if (!MAY_HAVE_DEBUG_BIND_STMTS) |
460 | { |
461 | for (ptr = start->next; ptr != start; ptr = ptr->next) |
462 | if (USE_STMT (ptr)) |
463 | num++; |
464 | } |
465 | else |
466 | for (ptr = start->next; ptr != start; ptr = ptr->next) |
467 | if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr))) |
468 | num++; |
469 | |
470 | return num; |
471 | } |
472 | |
473 | /* ----------------------------------------------------------------------- */ |
474 | |
/* The following set of routines are used to iterate over various types
   of SSA operands.  */
477 | |
/* Return true if PTR is finished iterating, i.e. the last bump returned
   the sentinel (NULL) value.  */
inline bool
op_iter_done (const ssa_op_iter *ptr)
{
  return ptr->done;
}
484 | |
/* Get the next iterator use value for PTR.  Returns NULL_USE_OPERAND_P
   and marks the iterator done when no uses remain.  */
inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
  /* First drain the use-operand chain of a normal statement.  */
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  /* For PHI iteration, NUMOPS holds the argument count; walk them.  */
  if (ptr->i < ptr->numops)
    {
      return PHI_ARG_DEF_PTR (ptr->stmt, (ptr->i)++);
    }
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}
504 | |
505 | /* Get the next iterator def value for PTR. */ |
506 | inline def_operand_p |
507 | op_iter_next_def (ssa_op_iter *ptr) |
508 | { |
509 | gcc_checking_assert (ptr->iter_type == ssa_op_iter_def); |
510 | if (ptr->flags & SSA_OP_VDEF) |
511 | { |
512 | tree *p; |
513 | ptr->flags &= ~SSA_OP_VDEF; |
514 | p = gimple_vdef_ptr (g: ptr->stmt); |
515 | if (p && *p) |
516 | return p; |
517 | } |
518 | if (ptr->flags & SSA_OP_DEF) |
519 | { |
520 | while (ptr->i < ptr->numops) |
521 | { |
522 | tree *val = gimple_op_ptr (gs: ptr->stmt, i: ptr->i); |
523 | ptr->i++; |
524 | if (*val) |
525 | { |
526 | if (TREE_CODE (*val) == TREE_LIST) |
527 | val = &TREE_VALUE (*val); |
528 | if (TREE_CODE (*val) == SSA_NAME |
529 | || is_gimple_reg (*val)) |
530 | return val; |
531 | } |
532 | } |
533 | ptr->flags &= ~SSA_OP_DEF; |
534 | } |
535 | |
536 | ptr->done = true; |
537 | return NULL_DEF_OPERAND_P; |
538 | } |
539 | |
540 | /* Get the next iterator tree value for PTR. */ |
541 | inline tree |
542 | op_iter_next_tree (ssa_op_iter *ptr) |
543 | { |
544 | tree val; |
545 | gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree); |
546 | if (ptr->uses) |
547 | { |
548 | val = USE_OP (ptr->uses); |
549 | ptr->uses = ptr->uses->next; |
550 | return val; |
551 | } |
552 | if (ptr->flags & SSA_OP_VDEF) |
553 | { |
554 | ptr->flags &= ~SSA_OP_VDEF; |
555 | if ((val = gimple_vdef (g: ptr->stmt))) |
556 | return val; |
557 | } |
558 | if (ptr->flags & SSA_OP_DEF) |
559 | { |
560 | while (ptr->i < ptr->numops) |
561 | { |
562 | val = gimple_op (gs: ptr->stmt, i: ptr->i); |
563 | ptr->i++; |
564 | if (val) |
565 | { |
566 | if (TREE_CODE (val) == TREE_LIST) |
567 | val = TREE_VALUE (val); |
568 | if (TREE_CODE (val) == SSA_NAME |
569 | || is_gimple_reg (val)) |
570 | return val; |
571 | } |
572 | } |
573 | ptr->flags &= ~SSA_OP_DEF; |
574 | } |
575 | |
576 | ptr->done = true; |
577 | return NULL_TREE; |
578 | } |
579 | |
580 | |
581 | /* This functions clears the iterator PTR, and marks it done. This is normally |
582 | used to prevent warnings in the compile about might be uninitialized |
583 | components. */ |
584 | |
585 | inline void |
586 | clear_and_done_ssa_iter (ssa_op_iter *ptr) |
587 | { |
588 | ptr->i = 0; |
589 | ptr->numops = 0; |
590 | ptr->uses = NULL; |
591 | ptr->iter_type = ssa_op_iter_none; |
592 | ptr->stmt = NULL; |
593 | ptr->done = true; |
594 | ptr->flags = 0; |
595 | } |
596 | |
597 | /* Initialize the iterator PTR to the virtual defs in STMT. */ |
598 | inline void |
599 | op_iter_init (ssa_op_iter *ptr, gimple *stmt, int flags) |
600 | { |
601 | /* PHI nodes require a different iterator initialization path. We |
602 | do not support iterating over virtual defs or uses without |
603 | iterating over defs or uses at the same time. */ |
604 | gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI |
605 | && (!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF)) |
606 | && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE))); |
607 | ptr->numops = 0; |
608 | if (flags & (SSA_OP_DEF | SSA_OP_VDEF)) |
609 | { |
610 | switch (gimple_code (g: stmt)) |
611 | { |
612 | case GIMPLE_ASSIGN: |
613 | case GIMPLE_CALL: |
614 | ptr->numops = 1; |
615 | break; |
616 | case GIMPLE_ASM: |
617 | ptr->numops = gimple_asm_noutputs (asm_stmt: as_a <gasm *> (p: stmt)); |
618 | break; |
619 | case GIMPLE_TRANSACTION: |
620 | ptr->numops = 0; |
621 | flags &= ~SSA_OP_DEF; |
622 | break; |
623 | default: |
624 | ptr->numops = 0; |
625 | flags &= ~(SSA_OP_DEF | SSA_OP_VDEF); |
626 | break; |
627 | } |
628 | } |
629 | ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (g: stmt) : NULL; |
630 | if (!(flags & SSA_OP_VUSE) |
631 | && ptr->uses |
632 | && gimple_vuse (g: stmt) != NULL_TREE) |
633 | ptr->uses = ptr->uses->next; |
634 | ptr->done = false; |
635 | ptr->i = 0; |
636 | |
637 | ptr->stmt = stmt; |
638 | ptr->flags = flags; |
639 | } |
640 | |
/* Initialize iterator PTR to the use operands in STMT based on FLAGS.  Return
   the first use, or NULL_USE_OPERAND_P if there is none.  */
inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  /* Asking for uses excludes all def flags.  */
  gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
		       && (flags & SSA_OP_USE));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}
652 | |
/* Initialize iterator PTR to the def operands in STMT based on FLAGS.  Return
   the first def, or NULL_DEF_OPERAND_P if there is none.  */
inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  /* Asking for defs excludes all use flags.  */
  gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
		       && (flags & SSA_OP_DEF));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_def;
  return op_iter_next_def (ptr);
}
664 | |
/* Initialize iterator PTR to the operands in STMT based on FLAGS.  Return
   the first operand as a tree, or NULL_TREE if there is none.  */
inline tree
op_iter_init_tree (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_tree;
  return op_iter_next_tree (ptr);
}
674 | |
675 | |
676 | /* If there is a single operand in STMT matching FLAGS, return it. Otherwise |
677 | return NULL. */ |
678 | inline tree |
679 | single_ssa_tree_operand (gimple *stmt, int flags) |
680 | { |
681 | tree var; |
682 | ssa_op_iter iter; |
683 | |
684 | var = op_iter_init_tree (ptr: &iter, stmt, flags); |
685 | if (op_iter_done (ptr: &iter)) |
686 | return NULL_TREE; |
687 | op_iter_next_tree (ptr: &iter); |
688 | if (op_iter_done (ptr: &iter)) |
689 | return var; |
690 | return NULL_TREE; |
691 | } |
692 | |
693 | |
694 | /* If there is a single operand in STMT matching FLAGS, return it. Otherwise |
695 | return NULL. */ |
696 | inline use_operand_p |
697 | single_ssa_use_operand (gimple *stmt, int flags) |
698 | { |
699 | use_operand_p var; |
700 | ssa_op_iter iter; |
701 | |
702 | var = op_iter_init_use (ptr: &iter, stmt, flags); |
703 | if (op_iter_done (ptr: &iter)) |
704 | return NULL_USE_OPERAND_P; |
705 | op_iter_next_use (ptr: &iter); |
706 | if (op_iter_done (ptr: &iter)) |
707 | return var; |
708 | return NULL_USE_OPERAND_P; |
709 | } |
710 | |
711 | /* Return the single virtual use operand in STMT if present. Otherwise |
712 | return NULL. */ |
713 | inline use_operand_p |
714 | ssa_vuse_operand (gimple *stmt) |
715 | { |
716 | if (! gimple_vuse (g: stmt)) |
717 | return NULL_USE_OPERAND_P; |
718 | return USE_OP_PTR (gimple_use_ops (stmt)); |
719 | } |
720 | |
721 | |
722 | /* If there is a single operand in STMT matching FLAGS, return it. Otherwise |
723 | return NULL. */ |
724 | inline def_operand_p |
725 | single_ssa_def_operand (gimple *stmt, int flags) |
726 | { |
727 | def_operand_p var; |
728 | ssa_op_iter iter; |
729 | |
730 | var = op_iter_init_def (ptr: &iter, stmt, flags); |
731 | if (op_iter_done (ptr: &iter)) |
732 | return NULL_DEF_OPERAND_P; |
733 | op_iter_next_def (ptr: &iter); |
734 | if (op_iter_done (ptr: &iter)) |
735 | return var; |
736 | return NULL_DEF_OPERAND_P; |
737 | } |
738 | |
739 | |
740 | /* Return true if there are zero operands in STMT matching the type |
741 | given in FLAGS. */ |
742 | inline bool |
743 | zero_ssa_operands (gimple *stmt, int flags) |
744 | { |
745 | ssa_op_iter iter; |
746 | |
747 | op_iter_init_tree (ptr: &iter, stmt, flags); |
748 | return op_iter_done (ptr: &iter); |
749 | } |
750 | |
751 | |
/* Return the number of operands matching FLAGS in STMT.  STMT must not
   be a PHI node.  */
inline int
num_ssa_operands (gimple *stmt, int flags)
{
  ssa_op_iter iter;
  tree t;
  int num = 0;

  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI);
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
    num++;
  return num;
}
765 | |
/* If the single DEF of the PHI node STMT matches FLAG, return it.
   Otherwise return NULL_TREE.  */
inline tree
single_phi_def (gphi *stmt, int flags)
{
  tree def = PHI_RESULT (stmt);
  /* A PHI has exactly one def; it is real or virtual depending on
     whether the result is a GIMPLE register.  */
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
    return def;
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
    return def;
  return NULL_TREE;
}
778 | |
779 | /* Initialize the iterator PTR for uses matching FLAGS in PHI. FLAGS should |
780 | be either SSA_OP_USES or SSA_OP_VIRTUAL_USES. */ |
781 | inline use_operand_p |
782 | op_iter_init_phiuse (ssa_op_iter *ptr, gphi *phi, int flags) |
783 | { |
784 | tree phi_def = gimple_phi_result (gs: phi); |
785 | int comp; |
786 | |
787 | clear_and_done_ssa_iter (ptr); |
788 | ptr->done = false; |
789 | |
790 | gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0); |
791 | |
792 | comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES); |
793 | |
794 | /* If the PHI node doesn't the operand type we care about, we're done. */ |
795 | if ((flags & comp) == 0) |
796 | { |
797 | ptr->done = true; |
798 | return NULL_USE_OPERAND_P; |
799 | } |
800 | |
801 | ptr->stmt = phi; |
802 | ptr->numops = gimple_phi_num_args (gs: phi); |
803 | ptr->iter_type = ssa_op_iter_use; |
804 | ptr->flags = flags; |
805 | return op_iter_next_use (ptr); |
806 | } |
807 | |
808 | |
/* Start an iterator for a PHI definition.  Return the PHI result slot
   if it matches FLAGS, otherwise NULL_DEF_OPERAND_P.  */

inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gphi *phi, int flags)
{
  tree phi_def = PHI_RESULT (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

  /* The PHI's single def is real or virtual according to its result.  */
  comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_DEF_OPERAND_P;
    }

  ptr->iter_type = ssa_op_iter_def;
  /* The first call to op_iter_next_def will terminate the iterator since
     all the fields are NULL.  Simply return the result here as the first and
     therefore only result.  */
  return PHI_RESULT_PTR (phi);
}
838 | |
/* Return true if IMM has reached the end of the immediate use stmt list,
   i.e. is back at the anchor node in the SSA_NAME.  */

inline bool
end_imm_use_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}
846 | |
847 | /* Finished the traverse of an immediate use stmt list IMM by removing the |
848 | placeholder node from the list. */ |
849 | |
850 | inline void |
851 | end_imm_use_stmt_traverse (imm_use_iterator *imm) |
852 | { |
853 | delink_imm_use (linknode: &(imm->iter_node)); |
854 | } |
855 | |
856 | /* Immediate use traversal of uses within a stmt require that all the |
857 | uses on a stmt be sequentially listed. This routine is used to build up |
858 | this sequential list by adding USE_P to the end of the current list |
859 | currently delimited by HEAD and LAST_P. The new LAST_P value is |
860 | returned. */ |
861 | |
862 | inline use_operand_p |
863 | move_use_after_head (use_operand_p use_p, use_operand_p head, |
864 | use_operand_p last_p) |
865 | { |
866 | gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head)); |
867 | /* Skip head when we find it. */ |
868 | if (use_p != head) |
869 | { |
870 | /* If use_p is already linked in after last_p, continue. */ |
871 | if (last_p->next == use_p) |
872 | last_p = use_p; |
873 | else |
874 | { |
875 | /* Delink from current location, and link in at last_p. */ |
876 | delink_imm_use (linknode: use_p); |
877 | link_imm_use_to_list (linknode: use_p, list: last_p); |
878 | last_p = use_p; |
879 | } |
880 | } |
881 | return last_p; |
882 | } |
883 | |
884 | |
885 | /* This routine will relink all uses with the same stmt as HEAD into the list |
886 | immediately following HEAD for iterator IMM. */ |
887 | |
888 | inline void |
889 | link_use_stmts_after (use_operand_p head, imm_use_iterator *imm) |
890 | { |
891 | use_operand_p use_p; |
892 | use_operand_p last_p = head; |
893 | gimple *head_stmt = USE_STMT (head); |
894 | tree use = USE_FROM_PTR (head); |
895 | ssa_op_iter op_iter; |
896 | int flag; |
897 | |
898 | /* Only look at virtual or real uses, depending on the type of HEAD. */ |
899 | flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES); |
900 | |
901 | if (gphi *phi = dyn_cast <gphi *> (p: head_stmt)) |
902 | { |
903 | FOR_EACH_PHI_ARG (use_p, phi, op_iter, flag) |
904 | if (USE_FROM_PTR (use_p) == use) |
905 | last_p = move_use_after_head (use_p, head, last_p); |
906 | } |
907 | else |
908 | { |
909 | if (flag == SSA_OP_USE) |
910 | { |
911 | FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag) |
912 | if (USE_FROM_PTR (use_p) == use) |
913 | last_p = move_use_after_head (use_p, head, last_p); |
914 | } |
915 | else if ((use_p = gimple_vuse_op (g: head_stmt)) != NULL_USE_OPERAND_P) |
916 | { |
917 | if (USE_FROM_PTR (use_p) == use) |
918 | last_p = move_use_after_head (use_p, head, last_p); |
919 | } |
920 | } |
921 | /* Link iter node in after last_p. */ |
922 | if (imm->iter_node.prev != NULL) |
923 | delink_imm_use (linknode: &imm->iter_node); |
924 | link_imm_use_to_list (linknode: &(imm->iter_node), list: last_p); |
925 | } |
926 | |
927 | /* Initialize IMM to traverse over uses of VAR. Return the first statement. */ |
928 | inline gimple * |
929 | first_imm_use_stmt (imm_use_iterator *imm, tree var) |
930 | { |
931 | imm->end_p = &(SSA_NAME_IMM_USE_NODE (var)); |
932 | imm->imm_use = imm->end_p->next; |
933 | imm->next_imm_name = NULL_USE_OPERAND_P; |
934 | |
935 | /* iter_node is used as a marker within the immediate use list to indicate |
936 | where the end of the current stmt's uses are. Initialize it to NULL |
937 | stmt and use, which indicates a marker node. */ |
938 | imm->iter_node.prev = NULL_USE_OPERAND_P; |
939 | imm->iter_node.next = NULL_USE_OPERAND_P; |
940 | imm->iter_node.loc.stmt = NULL; |
941 | imm->iter_node.use = NULL; |
942 | |
943 | if (end_imm_use_stmt_p (imm)) |
944 | return NULL; |
945 | |
946 | link_use_stmts_after (head: imm->imm_use, imm); |
947 | |
948 | return USE_STMT (imm->imm_use); |
949 | } |
950 | |
951 | /* Bump IMM to the next stmt which has a use of var. */ |
952 | |
953 | inline gimple * |
954 | next_imm_use_stmt (imm_use_iterator *imm) |
955 | { |
956 | imm->imm_use = imm->iter_node.next; |
957 | if (end_imm_use_stmt_p (imm)) |
958 | { |
959 | if (imm->iter_node.prev != NULL) |
960 | delink_imm_use (linknode: &imm->iter_node); |
961 | return NULL; |
962 | } |
963 | |
964 | link_use_stmts_after (head: imm->imm_use, imm); |
965 | return USE_STMT (imm->imm_use); |
966 | } |
967 | |
/* This routine will return the first use on the stmt IMM currently refers
   to.  The stmt's uses were made sequential by link_use_stmts_after.  */

inline use_operand_p
first_imm_use_on_stmt (imm_use_iterator *imm)
{
  /* Cache the next use early; IMM_USE may get relinked by the caller.  */
  imm->next_imm_name = imm->imm_use->next;
  return imm->imm_use;
}
977 | |
/* Return TRUE if the last use on the stmt IMM refers to has been visited,
   i.e. the iterator has reached its own marker node.  */

inline bool
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == &(imm->iter_node));
}
985 | |
986 | /* Bump to the next use on the stmt IMM refers to, return NULL if done. */ |
987 | |
988 | inline use_operand_p |
989 | next_imm_use_on_stmt (imm_use_iterator *imm) |
990 | { |
991 | imm->imm_use = imm->next_imm_name; |
992 | if (end_imm_use_on_stmt_p (imm)) |
993 | return NULL_USE_OPERAND_P; |
994 | else |
995 | { |
996 | imm->next_imm_name = imm->imm_use->next; |
997 | return imm->imm_use; |
998 | } |
999 | } |
1000 | |
1001 | /* Delink all immediate_use information for STMT. */ |
1002 | inline void |
1003 | delink_stmt_imm_use (gimple *stmt) |
1004 | { |
1005 | ssa_op_iter iter; |
1006 | use_operand_p use_p; |
1007 | |
1008 | if (ssa_operands_active (cfun)) |
1009 | FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_ALL_USES) |
1010 | delink_imm_use (linknode: use_p); |
1011 | } |
1012 | |
#endif /* GCC_SSA_ITERATORS_H */
1014 | |