/* Gimple walk support.

   Copyright (C) 2007-2024 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "stmt.h"

/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple *
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
                     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;

          gimple *g;
          if (!wi->removed_stmt)
            g = gsi_stmt (gsi);
          else
            {
              g = NULL;
              wi->removed_stmt = false;
            }
          return g;
        }

      if (!wi->removed_stmt)
        gsi_next (&gsi);
      else
        wi->removed_stmt = false;
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}


/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple *
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple *ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}


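/* Illustrative usage sketch (not part of this file): a caller typically
   zeroes a struct walk_stmt_info, parks pass-specific state in WI->INFO
   and hands walk_gimple_seq a walk_tree_fn operand callback; returning
   non-NULL from the callback stops the walk.  The callback name and the
   hash_set stored in WI->INFO are hypothetical.

     static tree
     note_var_decls (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                     void *data)
     {
       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
       if (VAR_P (*tp))
         ((hash_set<tree> *) wi->info)->add (*tp);
       return NULL_TREE;
     }

     ...
     hash_set<tree> vars;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &vars;
     walk_gimple_seq (seq, NULL, note_var_decls, &wi);  */
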
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gasm *stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      if (wi)
        {
          if (parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                       &allows_reg, &is_inout))
            wi->val_only = (allows_reg || !allows_mem);
        }
      if (wi)
        wi->is_lhs = true;
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));

      if (wi)
        {
          if (parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                      oconstraints, &allows_mem, &allows_reg))
            {
              wi->val_only = (allows_reg || !allows_mem);
              /* Although input "m" is not really an LHS, we need an lvalue.  */
              wi->is_lhs = !wi->val_only;
            }
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

     walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple *stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  hash_set<tree> *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  if (wi)
    wi->stmt = stmt;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
        {
          tree lhs = gimple_assign_lhs (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
        }

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS is of a non-renamable type or is a register variable,
             we may use a COMPONENT_REF on the LHS.  */
          tree rhs1 = gimple_assign_rhs1 (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }

      ret = walk_tree (gimple_call_chain_ptr (as_a <gcall *> (stmt)),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          if (wi)
            wi->val_only
              = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (gimple_call_lhs (stmt))
        {
          if (wi)
            {
              wi->is_lhs = true;
              wi->val_only
                = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
            }

          ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
          if (ret)
            return ret;
        }

      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (as_a <gcatch *> (stmt)),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (as_a <gasm *> (stmt), callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      {
        gomp_continue *cont_stmt = as_a <gomp_continue *> (stmt);
        ret = walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;

        ret = walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_CRITICAL:
      {
        gomp_critical *omp_stmt = as_a <gomp_critical *> (stmt);
        ret = walk_tree (gimple_omp_critical_name_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_critical_clauses_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_ORDERED:
      {
        gomp_ordered *omp_stmt = as_a <gomp_ordered *> (stmt);
        ret = walk_tree (gimple_omp_ordered_clauses_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_SCAN:
      {
        gomp_scan *scan_stmt = as_a <gomp_scan *> (stmt);
        ret = walk_tree (gimple_omp_scan_clauses_ptr (scan_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
        }
      break;

    case GIMPLE_OMP_PARALLEL:
      {
        gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
        ret = walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TARGET:
      {
        gomp_target *omp_stmt = as_a <gomp_target *> (stmt);
        ret = walk_tree (gimple_omp_target_clauses_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_target_child_fn_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_target_data_arg_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      {
        gomp_atomic_load *omp_stmt = as_a <gomp_atomic_load *> (stmt);
        ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      {
        gomp_atomic_store *omp_stmt = as_a <gomp_atomic_store *> (stmt);
        ret = walk_tree (gimple_omp_atomic_store_val_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_ASSUME:
      ret = walk_tree (gimple_assume_guard_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (stmt);

        ret = walk_tree (gimple_transaction_label_norm_ptr (txn),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_transaction_label_uninst_ptr (txn),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_transaction_label_over_ptr (txn),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_PHI:
      /* PHIs are not GSS_WITH_OPS so we need to handle them explicitly.  */
      {
        gphi *phi = as_a <gphi *> (stmt);
        if (wi)
          {
            wi->val_only = true;
            wi->is_lhs = true;
          }
        ret = walk_tree (gimple_phi_result_ptr (phi), callback_op, wi, pset);
        if (wi)
          wi->is_lhs = false;
        if (ret)
          return ret;
        for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
          {
            ret = walk_tree (gimple_phi_arg_def_ptr (phi, i),
                             callback_op, wi, pset);
            if (ret)
              return ret;
          }
        break;
      }

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


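/* Illustrative sketch (not part of this file): an operand callback passed
   to walk_gimple_op can consult the WI->IS_LHS flag maintained above to
   tell stored operands from loaded ones; returning non-NULL stops the walk
   and becomes walk_gimple_op's return value.  The callback name is
   hypothetical.

     static tree
     find_stored_decl (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                       void *data)
     {
       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
       if (wi->is_lhs && DECL_P (*tp))
         return *tp;
       return NULL_TREE;
     }

     ...
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     tree stored = walk_gimple_op (stmt, find_stored_decl, &wi);  */
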
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple *ret;
  tree tree_ret;
  gimple *stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
        input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT's operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
        return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
                                   as_a <gcatch *> (stmt)),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
                                   callback_stmt, callback_op, wi);
        if (ret)
          return wi->callback_result;
        ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
                                   callback_stmt, callback_op, wi);
        if (ret)
          return wi->callback_result;
      }
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_STRUCTURED_BLOCK:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_ASSUME:
      ret = walk_gimple_seq_mod (gimple_assume_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
                                   as_a <gtransaction *> (stmt)),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}

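/* Illustrative sketch (not part of this file): a statement callback that
   counts GIMPLE_CALLs; setting *HANDLED_OPS_P tells the walker to skip
   walk_gimple_op for that statement.  The callback name and the counter
   stored in WI->INFO are hypothetical.

     static tree
     count_calls_cb (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                     struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         ++*(unsigned *) wi->info;
       *handled_ops_p = true;
       return NULL_TREE;
     }

     ...
     unsigned ncalls = 0;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &ncalls;
     walk_gimple_seq (body, count_calls_cb, NULL, &wi);  */
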
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}


/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
   passing the STMT, the base of the operand, the operand itself containing
   the base and DATA to it.  The base will be either a decl, an indirect
   reference (including TARGET_MEM_REF) or the argument of an address
   expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple *stmt, void *data,
                               walk_stmt_load_store_addr_fn visit_load,
                               walk_stmt_load_store_addr_fn visit_store,
                               walk_stmt_load_store_addr_fn visit_addr)
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs, arg;
      if (visit_store)
        {
          arg = gimple_assign_lhs (stmt);
          lhs = get_base_loadstore (arg);
          if (lhs)
            ret |= visit_store (stmt, lhs, arg, data);
        }
      arg = gimple_assign_rhs1 (stmt);
      rhs = arg;
      while (handled_component_p (rhs))
        rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
        {
          if (TREE_CODE (rhs) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), arg, data);
          else if (TREE_CODE (rhs) == OBJ_TYPE_REF
                   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
                                                   0), arg, data);
          else if (TREE_CODE (rhs) == CONSTRUCTOR)
            {
              unsigned int ix;
              tree val;

              FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
                if (TREE_CODE (val) == ADDR_EXPR)
                  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), arg, data);
                else if (TREE_CODE (val) == OBJ_TYPE_REF
                         && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
                  ret |= visit_addr (stmt,
                                     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
                                                   0), arg, data);
            }
        }
      if (visit_load)
        {
          rhs = get_base_loadstore (rhs);
          if (rhs)
            ret |= visit_load (stmt, rhs, arg, data);
        }
    }
  else if (visit_addr
           && (is_gimple_assign (stmt)
               || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
        {
          tree op = gimple_op (stmt, i);
          if (op == NULL_TREE)
            ;
          else if (TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
          /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
             tree with two operands.  */
          else if (i == 1 && COMPARISON_CLASS_P (op))
            {
              if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
                ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
                                                       0), op, data);
              if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
                ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
                                                       0), op, data);
            }
        }
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      if (visit_store)
        {
          tree arg = gimple_call_lhs (call_stmt);
          if (arg)
            {
              tree lhs = get_base_loadstore (arg);
              if (lhs)
                ret |= visit_store (stmt, lhs, arg, data);
            }
        }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_call_num_args (call_stmt); ++i)
          {
            tree arg = gimple_call_arg (call_stmt, i);
            if (visit_addr
                && TREE_CODE (arg) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (arg, 0), arg, data);
            else if (visit_load)
              {
                tree rhs = get_base_loadstore (arg);
                if (rhs)
                  ret |= visit_load (stmt, rhs, arg, data);
              }
          }
      if (visit_addr
          && gimple_call_chain (call_stmt)
          && TREE_CODE (gimple_call_chain (call_stmt)) == ADDR_EXPR)
        ret |= visit_addr (stmt,
                           TREE_OPERAND (gimple_call_chain (call_stmt), 0),
                           gimple_call_chain (call_stmt), data);
      if (visit_addr
          && gimple_call_return_slot_opt_p (call_stmt)
          && gimple_call_lhs (call_stmt) != NULL_TREE
          && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call_stmt))))
        ret |= visit_addr (stmt, gimple_call_lhs (call_stmt),
                           gimple_call_lhs (call_stmt), data);
    }
  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = get_base_loadstore (TREE_VALUE (link));
            if (op && visit_store)
              ret |= visit_store (stmt, op, TREE_VALUE (link), data);
            if (visit_addr)
              {
                constraint = TREE_STRING_POINTER
                    (TREE_VALUE (TREE_PURPOSE (link)));
                oconstraints[i] = constraint;
                parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                         &allows_reg, &is_inout);
                if (op && !allows_reg && allows_mem)
                  ret |= visit_addr (stmt, op, TREE_VALUE (link), data);
              }
          }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if (visit_addr
                && TREE_CODE (op) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
            else if (visit_load || visit_addr)
              {
                op = get_base_loadstore (op);
                if (op)
                  {
                    if (visit_load)
                      ret |= visit_load (stmt, op, TREE_VALUE (link), data);
                    if (visit_addr)
                      {
                        constraint = TREE_STRING_POINTER
                            (TREE_VALUE (TREE_PURPOSE (link)));
                        parse_input_constraint (&constraint, 0, 0, noutputs,
                                                0, oconstraints,
                                                &allows_mem, &allows_reg);
                        if (!allows_reg && allows_mem)
                          ret |= visit_addr (stmt, op, TREE_VALUE (link),
                                             data);
                      }
                  }
              }
          }
    }
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree op = gimple_return_retval (return_stmt);
      if (op)
        {
          if (visit_addr
              && TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
          else if (visit_load)
            {
              tree base = get_base_loadstore (op);
              if (base)
                ret |= visit_load (stmt, base, op, data);
            }
        }
    }
  else if (visit_addr
           && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
        {
          tree op = gimple_phi_arg_def (stmt, i);
          if (TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
        }
    }
  else if (visit_addr
           && gimple_code (stmt) == GIMPLE_GOTO)
    {
      tree op = gimple_goto_dest (stmt);
      if (TREE_CODE (op) == ADDR_EXPR)
        ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
    }

  return ret;
}

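/* Illustrative sketch (not part of this file): a visitor that records every
   decl whose address may be taken by STMT; a true result from a visitor is
   OR-ed into the overall return value.  The callback name and the hash_set
   passed through DATA are hypothetical.

     static bool
     note_addr_taken (gimple *stmt ATTRIBUTE_UNUSED, tree addr,
                      tree ref ATTRIBUTE_UNUSED, void *data)
     {
       if (DECL_P (addr))
         ((hash_set<tree> *) data)->add (addr);
       return false;
     }

     ...
     hash_set<tree> addr_taken;
     walk_stmt_load_store_addr_ops (stmt, &addr_taken,
                                    NULL, NULL, note_addr_taken);  */
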
/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple *stmt, void *data,
                          walk_stmt_load_store_addr_fn visit_load,
                          walk_stmt_load_store_addr_fn visit_store)
{
  return walk_stmt_load_store_addr_ops (stmt, data,
                                        visit_load, visit_store, NULL);
}