1 | /* Post reload partially redundant load elimination |
2 | Copyright (C) 2004-2024 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "target.h" |
25 | #include "rtl.h" |
26 | #include "tree.h" |
27 | #include "predict.h" |
28 | #include "df.h" |
29 | #include "memmodel.h" |
30 | #include "tm_p.h" |
31 | #include "insn-config.h" |
32 | #include "emit-rtl.h" |
33 | #include "recog.h" |
34 | |
35 | #include "cfgrtl.h" |
36 | #include "profile.h" |
37 | #include "expr.h" |
38 | #include "tree-pass.h" |
39 | #include "dbgcnt.h" |
40 | #include "intl.h" |
41 | #include "gcse-common.h" |
42 | #include "gcse.h" |
43 | #include "regs.h" |
44 | #include "function-abi.h" |
45 | |
/* The following code implements GCSE after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after GCSE.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination, i.e. try to eliminate
   redundant loads created by the reload pass.  We look for fully or
   partially redundant loads fed by one or more loads/stores in
   predecessor BBs, and try adding loads on the incoming edges to make
   them fully redundant.  We also check whether it's worth adding loads
   to be able to delete the redundant load.
57 | |
58 | Algorithm: |
   1. Build an available-expressions hash table:
      For each load/store instruction, if the loaded/stored memory doesn't
      change until the end of the basic block, add this memory expression
      to the hash table.
   2. Perform redundancy elimination:
      For each load instruction, perform partial redundancy elimination:
      check whether it's worth adding loads to make the load fully
      redundant.  If so, add the loads and register copies and delete
      the load.
   3. Delete the instructions made redundant in step 2.
69 | |
70 | Future enhancement: |
71 | If the loaded register is used/defined between load and some store, |
72 | look for some other free register between load and all its stores, |
73 | and replace the load with a copy from this register to the loaded |
74 | register. |
75 | */ |
76 | |
77 | |
78 | /* Keep statistics of this pass. */ |
79 | static struct |
80 | { |
81 | int moves_inserted; |
82 | int copies_inserted; |
83 | int insns_deleted; |
84 | } stats; |
85 | |
86 | /* We need to keep a hash table of expressions. The table entries are of |
87 | type 'struct expr', and for each expression there is a single linked |
88 | list of occurrences. */ |
89 | |
90 | /* Expression elements in the hash table. */ |
91 | struct expr |
92 | { |
93 | /* The expression (SET_SRC for expressions, PATTERN for assignments). */ |
94 | rtx expr; |
95 | |
  /* The cached hash value for this entry. */
97 | hashval_t hash; |
98 | |
99 | /* Index in the transparent bitmaps. */ |
100 | unsigned int bitmap_index; |
101 | |
  /* List of available occurrences in basic blocks in the function. */
103 | struct occr *avail_occr; |
104 | }; |
105 | |
106 | /* Hashtable helpers. */ |
107 | |
108 | struct expr_hasher : nofree_ptr_hash <expr> |
109 | { |
110 | static inline hashval_t hash (const expr *); |
111 | static inline bool equal (const expr *, const expr *); |
112 | }; |
113 | |
114 | |
/* Hash expression X.
   *DO_NOT_RECORD_P is set nonzero if a volatile operand is found or if
   the expression contains something we don't want to insert in the
   table. */
119 | |
120 | static hashval_t |
121 | hash_expr (rtx x, int *do_not_record_p) |
122 | { |
123 | *do_not_record_p = 0; |
124 | return hash_rtx (x, GET_MODE (x), do_not_record_p, |
125 | NULL, /*have_reg_qty=*/false); |
126 | } |
127 | |
128 | /* Callback for hashtab. |
129 | Return the hash value for expression EXP. We don't actually hash |
130 | here, we just return the cached hash value. */ |
131 | |
132 | inline hashval_t |
133 | expr_hasher::hash (const expr *exp) |
134 | { |
135 | return exp->hash; |
136 | } |
137 | |
138 | /* Callback for hashtab. |
   Return true if EXP1 is equivalent to EXP2. */
140 | |
141 | inline bool |
142 | expr_hasher::equal (const expr *exp1, const expr *exp2) |
143 | { |
144 | bool equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true); |
145 | |
146 | gcc_assert (!equiv_p || exp1->hash == exp2->hash); |
147 | return equiv_p; |
148 | } |
149 | |
150 | /* The table itself. */ |
151 | static hash_table<expr_hasher> *expr_table; |
152 | |
153 | |
154 | static struct obstack expr_obstack; |
155 | |
156 | /* Occurrence of an expression. |
157 | There is at most one occurrence per basic block. If a pattern appears |
158 | more than once, the last appearance is used. */ |
159 | |
160 | struct occr |
161 | { |
162 | /* Next occurrence of this expression. */ |
163 | struct occr *next; |
164 | /* The insn that computes the expression. */ |
165 | rtx_insn *insn; |
  /* Nonzero if this occurrence has been marked for later deletion. */
167 | char deleted_p; |
168 | }; |
169 | |
170 | static struct obstack occr_obstack; |
171 | |
172 | /* The following structure holds the information about the occurrences of |
173 | the redundant instructions. */ |
174 | struct unoccr |
175 | { |
176 | struct unoccr *next; |
177 | edge pred; |
178 | rtx_insn *insn; |
179 | }; |
180 | |
181 | static struct obstack unoccr_obstack; |
182 | |
/* Array where each element is the CUID of the insn that last set the hard
   register with the number of the element, since the start of the current
   basic block.
186 | |
187 | This array is used during the building of the hash table (step 1) to |
188 | determine if a reg is killed before the end of a basic block. |
189 | |
190 | It is also used when eliminating partial redundancies (step 2) to see |
191 | if a reg was modified since the start of a basic block. */ |
192 | static int *reg_avail_info; |
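
/* As a made-up illustration of this convention: if hard register 5 was
   last set by the insn with CUID 4 in the current block, then
   reg_avail_info[5] == 4, so reg_changed_after_insn_p below reports a
   change for any CUID smaller than 4 and no change for CUID 4 or
   larger.  */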
193 | |
194 | /* A list of insns that may modify memory within the current basic block. */ |
195 | struct modifies_mem |
196 | { |
197 | rtx_insn *insn; |
198 | struct modifies_mem *next; |
199 | }; |
200 | static struct modifies_mem *modifies_mem_list; |
201 | |
202 | /* The modifies_mem structs also go on an obstack, only this obstack is |
203 | freed each time after completing the analysis or transformations on |
204 | a basic block. So we allocate a dummy modifies_mem_obstack_bottom |
205 | object on the obstack to keep track of the bottom of the obstack. */ |
206 | static struct obstack modifies_mem_obstack; |
207 | static struct modifies_mem *modifies_mem_obstack_bottom; |
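
/* The rollback itself is a single call, as done in reset_opr_set_tables
   below:

     obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);

   This frees all per-insn modifies_mem entries allocated since the
   previous reset in one shot, while the dummy bottom object keeps the
   obstack itself alive for the next basic block.  */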
208 | |
209 | /* Mapping of insn UIDs to CUIDs. |
210 | CUIDs are like UIDs except they increase monotonically in each basic |
211 | block, have no gaps, and only apply to real insns. */ |
212 | static int *uid_cuid; |
213 | #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)]) |
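
/* Consequently, for two real insns A and B in the same basic block,
   INSN_CUID (A) < INSN_CUID (B) exactly when A appears before B, so the
   "modified since/after insn" queries below reduce to simple integer
   comparisons.  */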
214 | |
215 | /* Bitmap of blocks which have memory stores. */ |
216 | static bitmap modify_mem_list_set; |
217 | |
218 | /* Bitmap of blocks which have calls. */ |
219 | static bitmap blocks_with_calls; |
220 | |
221 | /* Vector indexed by block # with a list of all the insns that |
222 | modify memory within the block. */ |
223 | static vec<rtx_insn *> *modify_mem_list; |
224 | |
225 | /* Vector indexed by block # with a canonicalized list of insns |
226 | that modify memory in the block. */ |
227 | static vec<modify_pair> *canon_modify_mem_list; |
228 | |
229 | /* Vector of simple bitmaps indexed by block number. Each component sbitmap |
230 | indicates which expressions are transparent through the block. */ |
231 | static sbitmap *transp; |
232 | |
233 | |
234 | /* Helpers for memory allocation/freeing. */ |
235 | static void alloc_mem (void); |
236 | static void free_mem (void); |
237 | |
238 | /* Support for hash table construction and transformations. */ |
239 | static bool oprs_unchanged_p (rtx, rtx_insn *, bool); |
240 | static void record_last_reg_set_info (rtx_insn *, rtx); |
241 | static void record_last_reg_set_info_regno (rtx_insn *, int); |
242 | static void record_last_mem_set_info (rtx_insn *); |
243 | static void record_last_set_info (rtx, const_rtx, void *); |
244 | static void record_opr_changes (rtx_insn *); |
245 | |
246 | static void find_mem_conflicts (rtx, const_rtx, void *); |
247 | static bool load_killed_in_block_p (int, rtx, bool); |
248 | static void reset_opr_set_tables (void); |
249 | |
250 | /* Hash table support. */ |
251 | static hashval_t hash_expr (rtx, int *); |
252 | static void insert_expr_in_table (rtx, rtx_insn *); |
253 | static struct expr *lookup_expr_in_table (rtx); |
254 | static void dump_hash_table (FILE *); |
255 | |
256 | /* Helpers for eliminate_partially_redundant_load. */ |
257 | static bool reg_killed_on_edge (rtx, edge); |
258 | static bool reg_used_on_edge (rtx, edge); |
259 | |
260 | static rtx get_avail_load_store_reg (rtx_insn *); |
261 | |
262 | static bool bb_has_well_behaved_predecessors (basic_block); |
263 | static struct occr* get_bb_avail_insn (basic_block, struct occr *, int); |
264 | static void hash_scan_set (rtx_insn *); |
265 | static void compute_hash_table (void); |
266 | |
/* The workhorses of this pass. */
268 | static void eliminate_partially_redundant_load (basic_block, |
269 | rtx_insn *, |
270 | struct expr *); |
271 | static void eliminate_partially_redundant_loads (void); |
272 | |
273 | |
274 | /* Allocate memory for the CUID mapping array and register/memory |
275 | tracking tables. */ |
276 | |
277 | static void |
278 | alloc_mem (void) |
279 | { |
280 | int i; |
281 | basic_block bb; |
282 | rtx_insn *insn; |
283 | |
284 | /* Find the largest UID and create a mapping from UIDs to CUIDs. */ |
285 | uid_cuid = XCNEWVEC (int, get_max_uid () + 1); |
286 | i = 1; |
287 | FOR_EACH_BB_FN (bb, cfun) |
288 | FOR_BB_INSNS (bb, insn) |
289 | { |
290 | if (INSN_P (insn)) |
291 | uid_cuid[INSN_UID (insn)] = i++; |
292 | else |
293 | uid_cuid[INSN_UID (insn)] = i; |
294 | } |
295 | |
296 | /* Allocate the available expressions hash table. We don't want to |
297 | make the hash table too small, but unnecessarily making it too large |
298 | also doesn't help. The i/4 is a gcse.cc relic, and seems like a |
299 | reasonable choice. */ |
300 | expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13)); |
301 | |
302 | /* We allocate everything on obstacks because we often can roll back |
303 | the whole obstack to some point. Freeing obstacks is very fast. */ |
304 | gcc_obstack_init (&expr_obstack); |
305 | gcc_obstack_init (&occr_obstack); |
306 | gcc_obstack_init (&unoccr_obstack); |
307 | gcc_obstack_init (&modifies_mem_obstack); |
308 | |
309 | /* Working array used to track the last set for each register |
310 | in the current block. */ |
311 | reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int)); |
312 | |
313 | /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we |
314 | can roll it back in reset_opr_set_tables. */ |
315 | modifies_mem_obstack_bottom = |
316 | (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack, |
317 | sizeof (struct modifies_mem)); |
318 | |
319 | blocks_with_calls = BITMAP_ALLOC (NULL); |
320 | modify_mem_list_set = BITMAP_ALLOC (NULL); |
321 | |
322 | modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun), |
323 | sizeof (vec_rtx_heap)); |
324 | canon_modify_mem_list |
325 | = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun), |
326 | sizeof (vec_modify_pair_heap)); |
327 | } |
328 | |
329 | /* Free memory allocated by alloc_mem. */ |
330 | |
331 | static void |
332 | free_mem (void) |
333 | { |
  free (uid_cuid);
335 | |
336 | delete expr_table; |
337 | expr_table = NULL; |
338 | |
339 | obstack_free (&expr_obstack, NULL); |
340 | obstack_free (&occr_obstack, NULL); |
341 | obstack_free (&unoccr_obstack, NULL); |
342 | obstack_free (&modifies_mem_obstack, NULL); |
343 | |
344 | unsigned i; |
345 | bitmap_iterator bi; |
346 | EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi) |
347 | { |
348 | modify_mem_list[i].release (); |
349 | canon_modify_mem_list[i].release (); |
350 | } |
351 | |
352 | BITMAP_FREE (blocks_with_calls); |
353 | BITMAP_FREE (modify_mem_list_set); |
  free (reg_avail_info);
  free (modify_mem_list);
  free (canon_modify_mem_list);
357 | } |
358 | |
359 | |
/* Insert expression X from INSN in the hash table.
361 | If it is already present, record it as the last occurrence in INSN's |
362 | basic block. */ |
363 | |
364 | static void |
365 | insert_expr_in_table (rtx x, rtx_insn *insn) |
366 | { |
367 | int do_not_record_p; |
368 | hashval_t hash; |
369 | struct expr *cur_expr, **slot; |
370 | struct occr *avail_occr; |
371 | |
372 | hash = hash_expr (x, &do_not_record_p); |
373 | |
374 | /* Do not insert expression in the table if it contains volatile operands, |
375 | or if hash_expr determines the expression is something we don't want |
376 | to or can't handle. */ |
377 | if (do_not_record_p) |
378 | return; |
379 | |
380 | /* We anticipate that redundant expressions are rare, so for convenience |
381 | allocate a new hash table element here already and set its fields. |
382 | If we don't do this, we need a hack with a static struct expr. Anyway, |
383 | obstack_free is really fast and one more obstack_alloc doesn't hurt if |
384 | we're going to see more expressions later on. */ |
385 | cur_expr = (struct expr *) obstack_alloc (&expr_obstack, |
386 | sizeof (struct expr)); |
387 | cur_expr->expr = x; |
388 | cur_expr->hash = hash; |
389 | cur_expr->avail_occr = NULL; |
390 | |
  slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);
392 | |
393 | if (! (*slot)) |
394 | { |
395 | /* The expression isn't found, so insert it. */ |
396 | *slot = cur_expr; |
397 | |
398 | /* Anytime we add an entry to the table, record the index |
399 | of the new entry. The bitmap index starts counting |
400 | at zero. */ |
401 | cur_expr->bitmap_index = expr_table->elements () - 1; |
402 | } |
403 | else |
404 | { |
405 | /* The expression is already in the table, so roll back the |
406 | obstack and use the existing table entry. */ |
407 | obstack_free (&expr_obstack, cur_expr); |
408 | cur_expr = *slot; |
409 | } |
410 | |
  /* Search for another occurrence in the same basic block.  We insert
     insns blockwise from start to end, and prepend new occurrences to
     the head of the list, so we only have to check a single element.  */
414 | avail_occr = cur_expr->avail_occr; |
415 | if (avail_occr |
      && BLOCK_FOR_INSN (avail_occr->insn) == BLOCK_FOR_INSN (insn))
417 | avail_occr->insn = insn; |
418 | else |
419 | { |
420 | /* First occurrence of this expression in this basic block. */ |
421 | avail_occr = (struct occr *) obstack_alloc (&occr_obstack, |
422 | sizeof (struct occr)); |
423 | avail_occr->insn = insn; |
424 | avail_occr->next = cur_expr->avail_occr; |
425 | avail_occr->deleted_p = 0; |
426 | cur_expr->avail_occr = avail_occr; |
427 | } |
428 | } |
429 | |
430 | |
431 | /* Lookup pattern PAT in the expression hash table. |
432 | The result is a pointer to the table entry, or NULL if not found. */ |
433 | |
434 | static struct expr * |
435 | lookup_expr_in_table (rtx pat) |
436 | { |
437 | int do_not_record_p; |
438 | struct expr **slot, *tmp_expr; |
439 | hashval_t hash = hash_expr (pat, &do_not_record_p); |
440 | |
441 | if (do_not_record_p) |
442 | return NULL; |
443 | |
444 | tmp_expr = (struct expr *) obstack_alloc (&expr_obstack, |
445 | sizeof (struct expr)); |
446 | tmp_expr->expr = pat; |
447 | tmp_expr->hash = hash; |
448 | tmp_expr->avail_occr = NULL; |
449 | |
  slot = expr_table->find_slot_with_hash (tmp_expr, hash, NO_INSERT);
451 | obstack_free (&expr_obstack, tmp_expr); |
452 | |
453 | if (!slot) |
454 | return NULL; |
455 | else |
456 | return (*slot); |
457 | } |
458 | |
459 | |
460 | /* Dump all expressions and occurrences that are currently in the |
461 | expression hash table to FILE. */ |
462 | |
463 | /* This helper is called via htab_traverse. */ |
464 | int |
465 | dump_expr_hash_table_entry (expr **slot, FILE *file) |
466 | { |
467 | struct expr *exprs = *slot; |
468 | struct occr *occr; |
469 | |
  fprintf (file, "expr: ");
  print_rtl (file, exprs->expr);
  fprintf (file, "\nhashcode: %u\n", exprs->hash);
  fprintf (file, "list of occurrences:\n");
474 | occr = exprs->avail_occr; |
475 | while (occr) |
476 | { |
477 | rtx_insn *insn = occr->insn; |
478 | print_rtl_single (file, insn); |
      fprintf (file, "\n");
480 | occr = occr->next; |
481 | } |
  fprintf (file, "\n");
483 | return 1; |
484 | } |
485 | |
486 | static void |
487 | dump_hash_table (FILE *file) |
488 | { |
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size " HOST_SIZE_T_PRINT_DEC ", " HOST_SIZE_T_PRINT_DEC
	   " elements, %f collision/search ratio\n",
	   (fmt_size_t) expr_table->size (),
	   (fmt_size_t) expr_table->elements (),
	   expr_table->collisions ());
495 | if (!expr_table->is_empty ()) |
496 | { |
      fprintf (file, "\n\ntable entries:\n");
      expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
499 | } |
  fprintf (file, "\n");
501 | } |
502 | |
503 | /* Return true if register X is recorded as being set by an instruction |
504 | whose CUID is greater than the one given. */ |
505 | |
506 | static bool |
507 | reg_changed_after_insn_p (rtx x, int cuid) |
508 | { |
509 | unsigned int regno, end_regno; |
510 | |
511 | regno = REGNO (x); |
512 | end_regno = END_REGNO (x); |
513 | do |
514 | if (reg_avail_info[regno] > cuid) |
515 | return true; |
516 | while (++regno < end_regno); |
517 | return false; |
518 | } |
519 | |
/* Return true if the operands of expression X are unchanged
521 | 1) from the start of INSN's basic block up to but not including INSN |
522 | if AFTER_INSN is false, or |
523 | 2) from INSN to the end of INSN's basic block if AFTER_INSN is true. */ |
524 | |
525 | static bool |
526 | oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn) |
527 | { |
528 | int i, j; |
529 | enum rtx_code code; |
530 | const char *fmt; |
531 | |
532 | if (x == 0) |
533 | return true; |
534 | |
535 | code = GET_CODE (x); |
536 | switch (code) |
537 | { |
538 | case REG: |
539 | /* We are called after register allocation. */ |
540 | gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER); |
541 | if (after_insn) |
542 | return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1); |
543 | else |
	return !reg_changed_after_insn_p (x, 0);
545 | |
546 | case MEM: |
547 | if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn)) |
548 | return false; |
549 | else |
550 | return oprs_unchanged_p (XEXP (x, 0), insn, after_insn); |
551 | |
552 | case PC: |
553 | case CONST: |
554 | CASE_CONST_ANY: |
555 | case SYMBOL_REF: |
556 | case LABEL_REF: |
557 | case ADDR_VEC: |
558 | case ADDR_DIFF_VEC: |
559 | return true; |
560 | |
561 | case PRE_DEC: |
562 | case PRE_INC: |
563 | case POST_DEC: |
564 | case POST_INC: |
565 | case PRE_MODIFY: |
566 | case POST_MODIFY: |
567 | if (after_insn) |
568 | return false; |
569 | break; |
570 | |
571 | default: |
572 | break; |
573 | } |
574 | |
575 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
576 | { |
577 | if (fmt[i] == 'e') |
578 | { |
579 | if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn)) |
580 | return false; |
581 | } |
582 | else if (fmt[i] == 'E') |
583 | for (j = 0; j < XVECLEN (x, i); j++) |
584 | if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn)) |
585 | return false; |
586 | } |
587 | |
588 | return true; |
589 | } |
590 | |
591 | |
592 | /* Used for communication between find_mem_conflicts and |
593 | load_killed_in_block_p. Nonzero if find_mem_conflicts finds a |
594 | conflict between two memory references. |
595 | This is a bit of a hack to work around the limitations of note_stores. */ |
596 | static int mems_conflict_p; |
597 | |
598 | /* DEST is the output of an instruction. If it is a memory reference, and |
599 | possibly conflicts with the load found in DATA, then set mems_conflict_p |
600 | to a nonzero value. */ |
601 | |
602 | static void |
603 | find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, |
604 | void *data) |
605 | { |
606 | rtx mem_op = (rtx) data; |
607 | |
608 | while (GET_CODE (dest) == SUBREG |
609 | || GET_CODE (dest) == ZERO_EXTRACT |
610 | || GET_CODE (dest) == STRICT_LOW_PART) |
611 | dest = XEXP (dest, 0); |
612 | |
613 | /* If DEST is not a MEM, then it will not conflict with the load. Note |
614 | that function calls are assumed to clobber memory, but are handled |
615 | elsewhere. */ |
616 | if (! MEM_P (dest)) |
617 | return; |
618 | |
619 | if (true_dependence (dest, GET_MODE (dest), mem_op)) |
620 | mems_conflict_p = 1; |
621 | } |
622 | |
623 | |
/* Return true if the expression in X (a memory reference) is killed
625 | in the current basic block before (if AFTER_INSN is false) or after |
626 | (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT. |
627 | |
628 | This function assumes that the modifies_mem table is flushed when |
629 | the hash table construction or redundancy elimination phases start |
630 | processing a new basic block. */ |
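
/* For example, with AFTER_INSN true and UID_LIMIT the CUID of a load,
   only memory-modifying insns at or after the load in the block are
   considered; with AFTER_INSN false, only those at or before it.  */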
631 | |
632 | static bool |
633 | load_killed_in_block_p (int uid_limit, rtx x, bool after_insn) |
634 | { |
635 | struct modifies_mem *list_entry = modifies_mem_list; |
636 | |
637 | while (list_entry) |
638 | { |
639 | rtx_insn *setter = list_entry->insn; |
640 | |
641 | /* Ignore entries in the list that do not apply. */ |
642 | if ((after_insn |
643 | && INSN_CUID (setter) < uid_limit) |
644 | || (! after_insn |
645 | && INSN_CUID (setter) > uid_limit)) |
646 | { |
647 | list_entry = list_entry->next; |
648 | continue; |
649 | } |
650 | |
651 | /* If SETTER is a call everything is clobbered. Note that calls |
652 | to pure functions are never put on the list, so we need not |
653 | worry about them. */ |
654 | if (CALL_P (setter)) |
655 | return true; |
656 | |
657 | /* SETTER must be an insn of some kind that sets memory. Call |
658 | note_stores to examine each hunk of memory that is modified. |
659 | It will set mems_conflict_p to nonzero if there may be a |
660 | conflict between X and SETTER. */ |
661 | mems_conflict_p = 0; |
662 | note_stores (setter, find_mem_conflicts, x); |
663 | if (mems_conflict_p) |
664 | return true; |
665 | |
666 | list_entry = list_entry->next; |
667 | } |
668 | return false; |
669 | } |
670 | |
671 | |
/* Record last-set information for REG in INSN. */
673 | |
674 | static inline void |
675 | record_last_reg_set_info (rtx_insn *insn, rtx reg) |
676 | { |
677 | unsigned int regno, end_regno; |
678 | |
679 | regno = REGNO (reg); |
  end_regno = END_REGNO (reg);
681 | do |
682 | reg_avail_info[regno] = INSN_CUID (insn); |
683 | while (++regno < end_regno); |
684 | } |
685 | |
686 | static inline void |
687 | record_last_reg_set_info_regno (rtx_insn *insn, int regno) |
688 | { |
689 | reg_avail_info[regno] = INSN_CUID (insn); |
690 | } |
691 | |
692 | |
693 | /* Record memory modification information for INSN. We do not actually care |
694 | about the memory location(s) that are set, or even how they are set (consider |
695 | a CALL_INSN). We merely need to record which insns modify memory. */ |
696 | |
697 | static void |
698 | record_last_mem_set_info (rtx_insn *insn) |
699 | { |
700 | struct modifies_mem *list_entry; |
701 | |
702 | list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack, |
703 | sizeof (struct modifies_mem)); |
704 | list_entry->insn = insn; |
705 | list_entry->next = modifies_mem_list; |
706 | modifies_mem_list = list_entry; |
707 | |
708 | record_last_mem_set_info_common (insn, modify_mem_list, |
709 | canon_modify_mem_list, |
710 | modify_mem_list_set, |
711 | blocks_with_calls); |
712 | } |
713 | |
714 | /* Called from compute_hash_table via note_stores to handle one |
715 | SET or CLOBBER in an insn. DATA is really the instruction in which |
716 | the SET is taking place. */ |
717 | |
718 | static void |
719 | record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data) |
720 | { |
721 | rtx_insn *last_set_insn = (rtx_insn *) data; |
722 | |
723 | if (GET_CODE (dest) == SUBREG) |
724 | dest = SUBREG_REG (dest); |
725 | |
726 | if (REG_P (dest)) |
    record_last_reg_set_info (last_set_insn, dest);
728 | else if (MEM_P (dest)) |
729 | { |
730 | /* Ignore pushes, they don't clobber memory. They may still |
731 | clobber the stack pointer though. Some targets do argument |
732 | pushes without adding REG_INC notes. See e.g. PR25196, |
733 | where a pushsi2 on i386 doesn't have REG_INC notes. Note |
734 | such changes here too. */ |
735 | if (! push_operand (dest, GET_MODE (dest))) |
	record_last_mem_set_info (last_set_insn);
      else
	record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
739 | } |
740 | } |
741 | |
742 | |
743 | /* Reset tables used to keep track of what's still available since the |
744 | start of the block. */ |
745 | |
746 | static void |
747 | reset_opr_set_tables (void) |
748 | { |
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
750 | obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom); |
751 | modifies_mem_list = NULL; |
752 | } |
753 | |
754 | |
755 | /* Record things set by INSN. |
756 | This data is used by oprs_unchanged_p. */ |
757 | |
758 | static void |
759 | record_opr_changes (rtx_insn *insn) |
760 | { |
761 | rtx note; |
762 | |
763 | /* Find all stores and record them. */ |
764 | note_stores (insn, record_last_set_info, insn); |
765 | |
766 | /* Also record autoincremented REGs for this insn as changed. */ |
767 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) |
768 | if (REG_NOTE_KIND (note) == REG_INC) |
769 | record_last_reg_set_info (insn, XEXP (note, 0)); |
770 | |
771 | /* Finally, if this is a call, record all call clobbers. */ |
772 | if (CALL_P (insn)) |
773 | { |
774 | unsigned int regno; |
775 | hard_reg_set_iterator hrsi; |
776 | /* We don't track modes of hard registers, so we need to be |
777 | conservative and assume that partial kills are full kills. */ |
778 | HARD_REG_SET callee_clobbers |
779 | = insn_callee_abi (insn).full_and_partial_reg_clobbers (); |
780 | EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, regno, hrsi) |
781 | record_last_reg_set_info_regno (insn, regno); |
782 | |
783 | if (! RTL_CONST_OR_PURE_CALL_P (insn) |
784 | || RTL_LOOPING_CONST_OR_PURE_CALL_P (insn) |
785 | || can_throw_external (insn)) |
786 | record_last_mem_set_info (insn); |
787 | } |
788 | } |
789 | |
790 | |
/* Scan the pattern of INSN and add an entry to the hash table.
792 | After reload we are interested in loads/stores only. */ |
793 | |
794 | static void |
795 | hash_scan_set (rtx_insn *insn) |
796 | { |
797 | rtx pat = PATTERN (insn); |
798 | rtx src = SET_SRC (pat); |
799 | rtx dest = SET_DEST (pat); |
800 | |
801 | /* We are only interested in loads and stores. */ |
802 | if (! MEM_P (src) && ! MEM_P (dest)) |
803 | return; |
804 | |
805 | /* Don't mess with jumps and nops. */ |
806 | if (JUMP_P (insn) || set_noop_p (pat)) |
807 | return; |
808 | |
809 | if (REG_P (dest)) |
810 | { |
811 | if (/* Don't CSE something if we can't do a reg/reg copy. */ |
812 | can_copy_p (GET_MODE (dest)) |
813 | /* Is SET_SRC something we want to gcse? */ |
814 | && general_operand (src, GET_MODE (src)) |
815 | #ifdef STACK_REGS |
816 | /* Never consider insns touching the register stack. It may |
817 | create situations that reg-stack cannot handle (e.g. a stack |
818 | register live across an abnormal edge). */ |
819 | && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG) |
820 | #endif |
821 | /* An expression is not available if its operands are |
822 | subsequently modified, including this insn. */ |
	  && oprs_unchanged_p (src, insn, true))
	{
	  insert_expr_in_table (src, insn);
826 | } |
827 | } |
828 | else if (REG_P (src)) |
829 | { |
      /* This is a store of a register to memory.  Record the stored
	 memory expression if it remains valid to the end of the block.  */
831 | if (/* Don't CSE something if we can't do a reg/reg copy. */ |
832 | can_copy_p (GET_MODE (src)) |
833 | /* Is SET_DEST something we want to gcse? */ |
834 | && general_operand (dest, GET_MODE (dest)) |
835 | #ifdef STACK_REGS |
836 | /* As above for STACK_REGS. */ |
837 | && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG) |
838 | #endif |
839 | && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest))) |
840 | /* Check if the memory expression is killed after insn. */ |
	  && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
	  && oprs_unchanged_p (XEXP (dest, 0), insn, true))
	{
	  insert_expr_in_table (dest, insn);
845 | } |
846 | } |
847 | } |
848 | |
849 | |
850 | /* Create hash table of memory expressions available at end of basic |
851 | blocks. Basically you should think of this hash table as the |
852 | representation of AVAIL_OUT. This is the set of expressions that |
853 | is generated in a basic block and not killed before the end of the |
854 | same basic block. Notice that this is really a local computation. */ |
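
/* For instance (a hypothetical, simplified block):

     r2 = [r1]	    load; killed by the store below, so not recorded
     ...	    nothing here modifies r1 or memory
     [r1] = r3	    store; [r1] is unchanged from here to the block end

   Only the store makes [r1] available at the end of the block, so the
   occurrence recorded for this block is the store, with r3 holding the
   memory's value.  */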
855 | |
856 | static void |
857 | compute_hash_table (void) |
858 | { |
859 | basic_block bb; |
860 | |
861 | FOR_EACH_BB_FN (bb, cfun) |
862 | { |
863 | rtx_insn *insn; |
864 | |
865 | /* First pass over the instructions records information used to |
866 | determine when registers and memory are last set. |
867 | Since we compute a "local" AVAIL_OUT, reset the tables that |
868 | help us keep track of what has been modified since the start |
869 | of the block. */ |
870 | reset_opr_set_tables (); |
871 | FOR_BB_INSNS (bb, insn) |
872 | { |
873 | if (INSN_P (insn)) |
874 | record_opr_changes (insn); |
875 | } |
876 | |
877 | /* The next pass actually builds the hash table. */ |
878 | FOR_BB_INSNS (bb, insn) |
879 | if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET) |
880 | hash_scan_set (insn); |
881 | } |
882 | } |
883 | |
884 | |
885 | /* Check if register REG is killed in any insn waiting to be inserted on |
886 | edge E. This function is required to check that our data flow analysis |
887 | is still valid prior to commit_edge_insertions. */ |
888 | |
889 | static bool |
890 | reg_killed_on_edge (rtx reg, edge e) |
891 | { |
892 | rtx_insn *insn; |
893 | |
894 | for (insn = e->insns.r; insn; insn = NEXT_INSN (insn)) |
895 | if (INSN_P (insn) && reg_set_p (reg, insn)) |
896 | return true; |
897 | |
898 | return false; |
899 | } |
900 | |
901 | /* Similar to above - check if register REG is used in any insn waiting |
902 | to be inserted on edge E. |
   This assumes no such insn can be a CALL_INSN; if that ever changes,
   call reg_used_between_p with PREV (insn), NEXT (insn) instead of
   reg_overlap_mentioned_p.  */
905 | |
906 | static bool |
907 | reg_used_on_edge (rtx reg, edge e) |
908 | { |
909 | rtx_insn *insn; |
910 | |
911 | for (insn = e->insns.r; insn; insn = NEXT_INSN (insn)) |
912 | if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn))) |
913 | return true; |
914 | |
915 | return false; |
916 | } |
917 | |
918 | /* Return the loaded/stored register of a load/store instruction. */ |
919 | |
920 | static rtx |
921 | get_avail_load_store_reg (rtx_insn *insn) |
922 | { |
923 | if (REG_P (SET_DEST (PATTERN (insn)))) |
924 | /* A load. */ |
925 | return SET_DEST (PATTERN (insn)); |
926 | else |
927 | { |
928 | /* A store. */ |
929 | gcc_assert (REG_P (SET_SRC (PATTERN (insn)))); |
930 | return SET_SRC (PATTERN (insn)); |
931 | } |
932 | } |
933 | |
934 | /* Return true if the predecessors of BB are "well behaved". */ |
935 | |
936 | static bool |
937 | bb_has_well_behaved_predecessors (basic_block bb) |
938 | { |
939 | edge pred; |
940 | edge_iterator ei; |
941 | |
942 | if (EDGE_COUNT (bb->preds) == 0) |
943 | return false; |
944 | |
945 | FOR_EACH_EDGE (pred, ei, bb->preds) |
946 | { |
947 | /* commit_one_edge_insertion refuses to insert on abnormal edges even if |
948 | the source has only one successor so EDGE_CRITICAL_P is too weak. */ |
      if ((pred->flags & EDGE_ABNORMAL) && !single_pred_p (pred->dest))
950 | return false; |
951 | |
952 | if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label) |
953 | return false; |
954 | |
955 | if (tablejump_p (BB_END (pred->src), NULL, NULL)) |
956 | return false; |
957 | } |
958 | return true; |
959 | } |
960 | |
961 | |
/* Search list ORIG_OCCR for an occurrence of the expression in BB, or,
   failing that, in a single predecessor through which the expression
   (with index BITMAP_INDEX in the transparency bitmaps) is transparent.  */
963 | |
964 | static struct occr* |
965 | get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index) |
966 | { |
967 | struct occr *occr = orig_occr; |
968 | |
969 | for (; occr != NULL; occr = occr->next) |
    if (BLOCK_FOR_INSN (occr->insn) == bb)
971 | return occr; |
972 | |
973 | /* If we could not find an occurrence in BB, see if BB |
974 | has a single predecessor with an occurrence that is |
975 | transparent through BB. */ |
  if (transp
      && single_pred_p (bb)
      && bitmap_bit_p (transp[bb->index], bitmap_index)
      && (occr = get_bb_avail_insn (single_pred (bb), orig_occr,
				    bitmap_index)))
    {
      rtx avail_reg = get_avail_load_store_reg (occr->insn);
      if (!reg_set_between_p (avail_reg,
			      PREV_INSN (BB_HEAD (bb)),
			      NEXT_INSN (BB_END (bb)))
	  && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
986 | return occr; |
987 | } |
988 | |
989 | return NULL; |
990 | } |
991 | |
992 | |
993 | /* This helper is called via htab_traverse. */ |
994 | int |
995 | compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED) |
996 | { |
997 | struct expr *expr = *slot; |
998 | |
999 | compute_transp (expr->expr, expr->bitmap_index, transp, |
1000 | blocks_with_calls, modify_mem_list_set, |
1001 | canon_modify_mem_list); |
1002 | return 1; |
1003 | } |
1004 | |
1005 | /* This handles the case where several stores feed a partially redundant |
1006 | load. It checks if the redundancy elimination is possible and if it's |
1007 | worth it. |
1008 | |
   Redundancy elimination is possible if:
1010 | 1) None of the operands of an insn have been modified since the start |
1011 | of the current basic block. |
1012 | 2) In any predecessor of the current basic block, the same expression |
1013 | is generated. |
1014 | |
1015 | See the function body for the heuristics that determine if eliminating |
1016 | a redundancy is also worth doing, assuming it is possible. */ |
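
/* As a made-up numeric illustration of those heuristics: with
   --param gcse-after-reload-partial-fraction=3, the elimination is only
   applied when the combined execution count of the predecessors where
   the expression is already available is at least 3 times the combined
   count of the predecessors where a compensating load must be inserted;
   a similar test against --param gcse-after-reload-critical-fraction
   guards the splitting of critical edges.  */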
1017 | |
1018 | static void |
1019 | eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn, |
1020 | struct expr *expr) |
1021 | { |
1022 | edge pred; |
1023 | rtx_insn *avail_insn = NULL; |
1024 | rtx avail_reg; |
1025 | rtx dest, pat; |
1026 | struct occr *a_occr; |
1027 | struct unoccr *occr, *avail_occrs = NULL; |
1028 | struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL; |
1029 | int npred_ok = 0; |
  /* Redundant load execution count.  */
  profile_count ok_count = profile_count::zero ();
  /* Execution count of critical edges.  */
  profile_count critical_count = profile_count::zero ();
1034 | edge_iterator ei; |
1035 | bool critical_edge_split = false; |
1036 | |
1037 | /* The execution count of the loads to be added to make the |
1038 | load fully redundant. */ |
1039 | profile_count not_ok_count = profile_count::zero (); |
1040 | basic_block pred_bb; |
1041 | |
1042 | pat = PATTERN (insn); |
1043 | dest = SET_DEST (pat); |
1044 | |
1045 | /* Check that the loaded register is not used, set, or killed from the |
1046 | beginning of the block. */ |
  if (reg_changed_after_insn_p (dest, 0)
1048 | || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn)) |
1049 | return; |
1050 | |
1051 | /* Check potential for replacing load with copy for predecessors. */ |
1052 | FOR_EACH_EDGE (pred, ei, bb->preds) |
1053 | { |
1054 | rtx_insn *next_pred_bb_end; |
1055 | |
1056 | avail_insn = NULL; |
1057 | avail_reg = NULL_RTX; |
1058 | pred_bb = pred->src; |
      for (a_occr = get_bb_avail_insn (pred_bb,
				       expr->avail_occr,
				       expr->bitmap_index);
	   a_occr;
	   a_occr = get_bb_avail_insn (pred_bb,
				       a_occr->next,
				       expr->bitmap_index))
1066 | { |
1067 | /* Check if the loaded register is not used. */ |
1068 | avail_insn = a_occr->insn; |
	  avail_reg = get_avail_load_store_reg (avail_insn);
1070 | gcc_assert (avail_reg); |
1071 | |
1072 | /* Make sure we can generate a move from register avail_reg to |
1073 | dest. */ |
1074 | rtx_insn *move = gen_move_insn (copy_rtx (dest), |
1075 | copy_rtx (avail_reg)); |
1076 | extract_insn (move); |
1077 | if (! constrain_operands (1, get_preferred_alternatives (insn, |
1078 | pred_bb)) |
	      || reg_killed_on_edge (avail_reg, pred)
	      || reg_used_on_edge (dest, pred))
1081 | { |
1082 | avail_insn = NULL; |
1083 | continue; |
1084 | } |
1085 | next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn))); |
1086 | if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end)) |
1087 | /* AVAIL_INSN remains non-null. */ |
1088 | break; |
1089 | else |
1090 | avail_insn = NULL; |
1091 | } |
1092 | |
1093 | if (EDGE_CRITICAL_P (pred) && pred->count ().initialized_p ()) |
1094 | critical_count += pred->count (); |
1095 | |
1096 | if (avail_insn != NULL_RTX) |
1097 | { |
1098 | npred_ok++; |
1099 | if (pred->count ().initialized_p ()) |
1100 | ok_count = ok_count + pred->count (); |
	  if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
						    copy_rtx (avail_reg)))))
1103 | { |
1104 | /* Check if there is going to be a split. */ |
1105 | if (EDGE_CRITICAL_P (pred)) |
1106 | critical_edge_split = true; |
1107 | } |
	  else /* It's a dead move; no need to generate it.  */
1109 | continue; |
1110 | occr = (struct unoccr *) obstack_alloc (&unoccr_obstack, |
1111 | sizeof (struct unoccr)); |
1112 | occr->insn = avail_insn; |
1113 | occr->pred = pred; |
1114 | occr->next = avail_occrs; |
1115 | avail_occrs = occr; |
1116 | if (! rollback_unoccr) |
1117 | rollback_unoccr = occr; |
1118 | } |
1119 | else |
1120 | { |
1121 | /* Adding a load on a critical edge will cause a split. */ |
1122 | if (EDGE_CRITICAL_P (pred)) |
1123 | critical_edge_split = true; |
1124 | if (pred->count ().initialized_p ()) |
1125 | not_ok_count = not_ok_count + pred->count (); |
1126 | unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack, |
1127 | sizeof (struct unoccr)); |
1128 | unoccr->insn = NULL; |
1129 | unoccr->pred = pred; |
1130 | unoccr->next = unavail_occrs; |
1131 | unavail_occrs = unoccr; |
1132 | if (! rollback_unoccr) |
1133 | rollback_unoccr = unoccr; |
1134 | } |
1135 | } |
1136 | |
1137 | if (/* No load can be replaced by copy. */ |
1138 | npred_ok == 0 |
1139 | /* Prevent exploding the code. */ |
1140 | || (optimize_bb_for_size_p (bb) && npred_ok > 1) |
1141 | /* If we don't have profile information we cannot tell if splitting |
1142 | a critical edge is profitable or not so don't do it. */ |
1143 | || ((!profile_info || profile_status_for_fn (cfun) != PROFILE_READ |
1144 | || targetm.cannot_modify_jumps_p ()) |
1145 | && critical_edge_split)) |
1146 | goto cleanup; |
1147 | |
1148 | /* Check if it's worth applying the partial redundancy elimination. */ |
1149 | if (ok_count.to_gcov_type () |
1150 | < param_gcse_after_reload_partial_fraction * not_ok_count.to_gcov_type ()) |
1151 | goto cleanup; |
1152 | |
1153 | gcov_type threshold; |
1154 | #if (GCC_VERSION >= 5000) |
1155 | if (__builtin_mul_overflow (param_gcse_after_reload_critical_fraction, |
1156 | critical_count.to_gcov_type (), &threshold)) |
1157 | threshold = profile_count::max_count; |
1158 | #else |
1159 | threshold |
1160 | = (param_gcse_after_reload_critical_fraction |
1161 | * critical_count.to_gcov_type ()); |
1162 | #endif |
1163 | |
1164 | if (ok_count.to_gcov_type () < threshold) |
1165 | goto cleanup; |
1166 | |
1167 | /* Generate moves to the loaded register from where |
1168 | the memory is available. */ |
1169 | for (occr = avail_occrs; occr; occr = occr->next) |
1170 | { |
1171 | avail_insn = occr->insn; |
1172 | pred = occr->pred; |
1173 | /* Set avail_reg to be the register having the value of the |
1174 | memory. */ |
      avail_reg = get_avail_load_store_reg (avail_insn);
1176 | gcc_assert (avail_reg); |
1177 | |
1178 | insert_insn_on_edge (gen_move_insn (copy_rtx (dest), |
1179 | copy_rtx (avail_reg)), |
1180 | pred); |
1181 | stats.moves_inserted++; |
1182 | |
1183 | if (dump_file) |
	fprintf (dump_file,
		 "generating move from %d to %d on edge from %d to %d\n",
		 REGNO (avail_reg),
		 REGNO (dest),
		 pred->src->index,
		 pred->dest->index);
1190 | } |
1191 | |
1192 | /* Regenerate loads where the memory is unavailable. */ |
1193 | for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next) |
1194 | { |
1195 | pred = unoccr->pred; |
1196 | insert_insn_on_edge (copy_insn (PATTERN (insn)), pred); |
1197 | stats.copies_inserted++; |
1198 | |
1199 | if (dump_file) |
1200 | { |
	  fprintf (dump_file,
		   "generating on edge from %d to %d a copy of load: ",
		   pred->src->index,
		   pred->dest->index);
	  print_rtl (dump_file, PATTERN (insn));
	  fprintf (dump_file, "\n");
1207 | } |
1208 | } |
1209 | |
1210 | /* Delete the insn if it is not available in this block and mark it |
1211 | for deletion if it is available. If insn is available it may help |
1212 | discover additional redundancies, so mark it for later deletion. */ |
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
1216 | ; |
1217 | |
1218 | if (!a_occr) |
1219 | { |
1220 | stats.insns_deleted++; |
1221 | |
1222 | if (dump_file) |
1223 | { |
	  fprintf (dump_file, "deleting insn:\n");
	  print_rtl_single (dump_file, insn);
	  fprintf (dump_file, "\n");
1227 | } |
1228 | delete_insn (insn); |
1229 | } |
1230 | else |
1231 | a_occr->deleted_p = 1; |
1232 | |
1233 | cleanup: |
1234 | if (rollback_unoccr) |
1235 | obstack_free (&unoccr_obstack, rollback_unoccr); |
1236 | } |
1237 | |
/* Perform the redundancy elimination as described above. */
1239 | |
1240 | static void |
1241 | eliminate_partially_redundant_loads (void) |
1242 | { |
1243 | rtx_insn *insn; |
1244 | basic_block bb; |
1245 | |
1246 | /* Note we start at block 1. */ |
1247 | |
1248 | if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
1249 | return; |
1250 | |
1251 | FOR_BB_BETWEEN (bb, |
1252 | ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb, |
1253 | EXIT_BLOCK_PTR_FOR_FN (cfun), |
1254 | next_bb) |
1255 | { |
1256 | /* Don't try anything on basic blocks with strange predecessors. */ |
1257 | if (! bb_has_well_behaved_predecessors (bb)) |
1258 | continue; |
1259 | |
1260 | /* Do not try anything on cold basic blocks. */ |
1261 | if (optimize_bb_for_size_p (bb)) |
1262 | continue; |
1263 | |
1264 | /* Reset the table of things changed since the start of the current |
1265 | basic block. */ |
1266 | reset_opr_set_tables (); |
1267 | |
1268 | /* Look at all insns in the current basic block and see if there are |
1269 | any loads in it that we can record. */ |
1270 | FOR_BB_INSNS (bb, insn) |
1271 | { |
1272 | /* Is it a load - of the form (set (reg) (mem))? */ |
1273 | if (NONJUMP_INSN_P (insn) |
1274 | && GET_CODE (PATTERN (insn)) == SET |
1275 | && REG_P (SET_DEST (PATTERN (insn))) |
1276 | && MEM_P (SET_SRC (PATTERN (insn)))) |
1277 | { |
1278 | rtx pat = PATTERN (insn); |
1279 | rtx src = SET_SRC (pat); |
1280 | struct expr *expr; |
1281 | |
1282 | if (!MEM_VOLATILE_P (src) |
1283 | && GET_MODE (src) != BLKmode |
1284 | && general_operand (src, GET_MODE (src)) |
1285 | /* Are the operands unchanged since the start of the |
1286 | block? */ |
		&& oprs_unchanged_p (src, insn, false)
1288 | && !(cfun->can_throw_non_call_exceptions && may_trap_p (src)) |
1289 | && !side_effects_p (src) |
1290 | /* Is the expression recorded? */ |
		&& (expr = lookup_expr_in_table (src)) != NULL)
1292 | { |
	      /* We now have a load (insn) and an available memory
		 expression at its BB start (expr).  Try to remove the
		 load if it is redundant.  */
1296 | eliminate_partially_redundant_load (bb, insn, expr); |
1297 | } |
1298 | } |
1299 | |
1300 | /* Keep track of everything modified by this insn, so that we |
1301 | know what has been modified since the start of the current |
1302 | basic block. */ |
1303 | if (INSN_P (insn)) |
1304 | record_opr_changes (insn); |
1305 | } |
1306 | } |
1307 | |
1308 | commit_edge_insertions (); |
1309 | } |
1310 | |
1311 | /* Go over the expression hash table and delete insns that were |
1312 | marked for later deletion. */ |
1313 | |
1314 | /* This helper is called via htab_traverse. */ |
1315 | int |
1316 | delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED) |
1317 | { |
1318 | struct expr *exprs = *slot; |
1319 | struct occr *occr; |
1320 | |
1321 | for (occr = exprs->avail_occr; occr != NULL; occr = occr->next) |
1322 | { |
      if (occr->deleted_p && dbg_cnt (gcse2_delete))
1324 | { |
1325 | delete_insn (occr->insn); |
1326 | stats.insns_deleted++; |
1327 | |
1328 | if (dump_file) |
1329 | { |
	      fprintf (dump_file, "deleting insn:\n");
	      print_rtl_single (dump_file, occr->insn);
	      fprintf (dump_file, "\n");
1333 | } |
1334 | } |
1335 | } |
1336 | |
1337 | return 1; |
1338 | } |
1339 | |
1340 | static void |
1341 | delete_redundant_insns (void) |
1342 | { |
1343 | expr_table->traverse <void *, delete_redundant_insns_1> (NULL); |
1344 | if (dump_file) |
    fprintf (dump_file, "\n");
1346 | } |
1347 | |
/* Main entry point of GCSE after reload: clean up redundant loads
   due to spilling.  */
1350 | |
1351 | static void |
1352 | gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED) |
1353 | { |
  /* Disable computing transparency if it is too expensive.  */
  bool do_transp
    = !gcse_or_cprop_is_too_expensive (_("using simple load CSE after register "
					 "allocation"));
1358 | |
  memset (&stats, 0, sizeof (stats));
1360 | |
1361 | /* Allocate memory for this pass. |
1362 | Also computes and initializes the insns' CUIDs. */ |
1363 | alloc_mem (); |
1364 | |
1365 | /* We need alias analysis. */ |
1366 | init_alias_analysis (); |
1367 | |
1368 | compute_hash_table (); |
1369 | |
1370 | if (dump_file) |
    dump_hash_table (dump_file);
1372 | |
1373 | if (!expr_table->is_empty ()) |
1374 | { |
      /* Knowing which MEMs are transparent through a block can
	 significantly increase the number of redundant loads found.  So
	 compute transparency information for each memory expression in
	 the hash table.  */
1378 | df_analyze (); |
1379 | if (do_transp) |
1380 | { |
1381 | /* This cannot be part of the normal allocation routine because |
1382 | we have to know the number of elements in the hash table. */ |
1383 | transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), |
1384 | expr_table->elements ()); |
1385 | bitmap_vector_ones (transp, last_basic_block_for_fn (cfun)); |
	  expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
1387 | } |
1388 | else |
1389 | transp = NULL; |
1390 | eliminate_partially_redundant_loads (); |
1391 | delete_redundant_insns (); |
1392 | if (do_transp) |
	sbitmap_vector_free (transp);
1394 | |
1395 | if (dump_file) |
1396 | { |
	  fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
	  fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
	  fprintf (dump_file, "moves inserted: %d\n", stats.moves_inserted);
	  fprintf (dump_file, "insns deleted: %d\n", stats.insns_deleted);
	  fprintf (dump_file, "\n\n");
1402 | } |
1403 | |
      statistics_counter_event (cfun, "copies inserted",
				stats.copies_inserted);
      statistics_counter_event (cfun, "moves inserted",
				stats.moves_inserted);
      statistics_counter_event (cfun, "insns deleted",
				stats.insns_deleted);
1410 | } |
1411 | |
  /* We are finished with alias analysis.  */
1413 | end_alias_analysis (); |
1414 | |
1415 | free_mem (); |
1416 | } |
1417 | |
1418 | |
1419 | |
1420 | static void |
1421 | rest_of_handle_gcse2 (void) |
1422 | { |
  gcse_after_reload_main (get_insns ());
1424 | rebuild_jump_labels (get_insns ()); |
1425 | } |
1426 | |
1427 | namespace { |
1428 | |
1429 | const pass_data pass_data_gcse2 = |
1430 | { |
  RTL_PASS, /* type */
  "gcse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GCSE_AFTER_RELOAD, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
1440 | }; |
1441 | |
1442 | class pass_gcse2 : public rtl_opt_pass |
1443 | { |
1444 | public: |
1445 | pass_gcse2 (gcc::context *ctxt) |
1446 | : rtl_opt_pass (pass_data_gcse2, ctxt) |
1447 | {} |
1448 | |
1449 | /* opt_pass methods: */ |
1450 | bool gate (function *fun) final override |
1451 | { |
1452 | return (optimize > 0 && flag_gcse_after_reload |
1453 | && optimize_function_for_speed_p (fun)); |
1454 | } |
1455 | |
1456 | unsigned int execute (function *) final override |
1457 | { |
1458 | rest_of_handle_gcse2 (); |
1459 | return 0; |
1460 | } |
1461 | |
1462 | }; // class pass_gcse2 |
1463 | |
1464 | } // anon namespace |
1465 | |
1466 | rtl_opt_pass * |
1467 | make_pass_gcse2 (gcc::context *ctxt) |
1468 | { |
1469 | return new pass_gcse2 (ctxt); |
1470 | } |
1471 | |