1/* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "backend.h"
24#include "target.h"
25#include "rtl.h"
26#include "tree.h"
27#include "predict.h"
28#include "df.h"
29#include "memmodel.h"
30#include "tm_p.h"
31#include "optabs.h"
32#include "regs.h"
33#include "ira.h"
34#include "recog.h"
35
36#include "rtl-error.h"
37#include "expr.h"
38#include "addresses.h"
39#include "cfgrtl.h"
40#include "cfgbuild.h"
41#include "reload.h"
42#include "except.h"
43#include "dumpfile.h"
44#include "rtl-iter.h"
45
46/* This file contains the reload pass of the compiler, which is
47 run after register allocation has been done. It checks that
48 each insn is valid (operands required to be in registers really
49 are in registers of the proper class) and fixes up invalid ones
50 by copying values temporarily into registers for the insns
51 that need them.
52
53 The results of register allocation are described by the vector
54 reg_renumber; the insns still contain pseudo regs, but reg_renumber
55 can be used to find which hard reg, if any, a pseudo reg is in.
56
57 The technique we always use is to free up a few hard regs that are
58 called ``reload regs'', and for each place where a pseudo reg
59 must be in a hard reg, copy it temporarily into one of the reload regs.
60
61 Reload regs are allocated locally for every instruction that needs
62 reloads. When there are pseudos which are allocated to a register that
63 has been chosen as a reload reg, such pseudos must be ``spilled''.
64 This means that they go to other hard regs, or to stack slots if no other
65 available hard regs can be found. Spilling can invalidate more
66 insns, requiring additional need for reloads, so we must keep checking
67 until the process stabilizes.
68
69 For machines with different classes of registers, we must keep track
70 of the register class needed for each reload, and make sure that
71 we allocate enough reload registers of each class.
72
73 The file reload.c contains the code that checks one insn for
74 validity and reports the reloads that it needs. This file
75 is in charge of scanning the entire rtl code, accumulating the
76 reload needs, spilling, assigning reload registers to use for
77 fixing up each insn, and generating the new insns to copy values
78 into the reload registers. */
79
80struct target_reload default_target_reload;
81#if SWITCHABLE_TARGET
82struct target_reload *this_target_reload = &default_target_reload;
83#endif
84
85#define spill_indirect_levels \
86 (this_target_reload->x_spill_indirect_levels)
87
88/* During reload_as_needed, element N contains a REG rtx for the hard reg
89 into which reg N has been reloaded (perhaps for a previous insn). */
90static rtx *reg_last_reload_reg;
91
92/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
93 for an output reload that stores into reg N. */
94static regset_head reg_has_output_reload;
95
96/* Indicates which hard regs are reload-registers for an output reload
97 in the current insn. */
98static HARD_REG_SET reg_is_output_reload;
99
100/* Widest mode in which each pseudo reg is referred to (via subreg). */
101static machine_mode *reg_max_ref_mode;
102
103/* Vector to remember old contents of reg_renumber before spilling. */
104static short *reg_old_renumber;
105
106/* During reload_as_needed, element N contains the last pseudo regno reloaded
107 into hard register N. If that pseudo reg occupied more than one register,
108 reg_reloaded_contents points to that pseudo for each spill register in
109 use; all of these must remain set for an inheritance to occur. */
110static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
111
112/* During reload_as_needed, element N contains the insn for which
113 hard register N was last used. Its contents are significant only
114 when reg_reloaded_valid is set for this register. */
115static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
116
117/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
118static HARD_REG_SET reg_reloaded_valid;
119/* Indicate if the register was dead at the end of the reload.
120 This is only valid if reg_reloaded_contents is set and valid. */
121static HARD_REG_SET reg_reloaded_dead;
122
123/* Indicate whether the register's current value is one that is not
124 safe to retain across a call, even for registers that are normally
125 call-saved. This is only meaningful for members of reg_reloaded_valid. */
126static HARD_REG_SET reg_reloaded_call_part_clobbered;
127
128/* Number of spill-regs so far; number of valid elements of spill_regs. */
129static int n_spills;
130
131/* In parallel with spill_regs, contains REG rtx's for those regs.
132 Holds the last rtx used for any given reg, or 0 if it has never
133 been used for spilling yet. This rtx is reused, provided it has
134 the proper mode. */
135static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
136
137/* In parallel with spill_regs, contains nonzero for a spill reg
138 that was stored after the last time it was used.
139 The precise value is the insn generated to do the store. */
140static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
141
142/* This is the register that was stored with spill_reg_store. This is a
143 copy of reload_out / reload_out_reg when the value was stored; if
144 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
145static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
146
147/* This table is the inverse mapping of spill_regs:
148 indexed by hard reg number,
149 it contains the position of that reg in spill_regs,
150 or -1 for something that is not in spill_regs.
151
152 ?!? This is no longer accurate. */
153static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154
155/* This reg set indicates registers that can't be used as spill registers for
156 the currently processed insn. These are the hard registers which are live
157 during the insn, but not allocated to pseudos, as well as fixed
158 registers. */
159static HARD_REG_SET bad_spill_regs;
160
161/* These are the hard registers that can't be used as spill register for any
162 insn. This includes registers used for user variables and registers that
163 we can't eliminate. A register that appears in this set also can't be used
164 to retry register allocation. */
165static HARD_REG_SET bad_spill_regs_global;
166
167/* Describes order of use of registers for reloading
168 of spilled pseudo-registers. `n_spills' is the number of
169 elements that are actually valid; new ones are added at the end.
170
171 Both spill_regs and spill_reg_order are used on two occasions:
172 once during find_reload_regs, where they keep track of the spill registers
173 for a single insn, but also during reload_as_needed where they show all
174 the registers ever used by reload. For the latter case, the information
175 is calculated during finish_spills. */
176static short spill_regs[FIRST_PSEUDO_REGISTER];
177
178/* This vector of reg sets indicates, for each pseudo, which hard registers
179 may not be used for retrying global allocation because the register was
180 formerly spilled from one of them. If we allowed reallocating a pseudo to
181 a register that it was already allocated to, reload might not
182 terminate. */
183static HARD_REG_SET *pseudo_previous_regs;
184
185/* This vector of reg sets indicates, for each pseudo, which hard
186 registers may not be used for retrying global allocation because they
187 are used as spill registers during one of the insns in which the
188 pseudo is live. */
189static HARD_REG_SET *pseudo_forbidden_regs;
190
191/* All hard regs that have been used as spill registers for any insn are
192 marked in this set. */
193static HARD_REG_SET used_spill_regs;
194
195/* Index of last register assigned as a spill register. We allocate in
196 a round-robin fashion. */
197static int last_spill_reg;
198
199/* Record the stack slot for each spilled hard register. */
200static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
201
202/* Width allocated so far for that stack slot. */
203static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
204
205/* Record which pseudos needed to be spilled. */
206static regset_head spilled_pseudos;
207
208/* Record which pseudos changed their allocation in finish_spills. */
209static regset_head changed_allocation_pseudos;
210
211/* Used for communication between order_regs_for_reload and count_pseudo.
212 Used to avoid counting one pseudo twice. */
213static regset_head pseudos_counted;
214
215/* First uid used by insns created by reload in this function.
216 Used in find_equiv_reg. */
217int reload_first_uid;
218
219/* Flag set by local-alloc or global-alloc if anything is live in
220 a call-clobbered reg across calls. */
221int caller_save_needed;
222
223/* Set to 1 while reload_as_needed is operating.
224 Required by some machines to handle any generated moves differently. */
225int reload_in_progress = 0;
226
227/* This obstack is used for allocation of rtl during register elimination.
228 The allocated storage can be freed once find_reloads has processed the
229 insn. */
230static struct obstack reload_obstack;
231
232/* Points to the beginning of the reload_obstack. All insn_chain structures
233 are allocated first. */
234static char *reload_startobj;
235
236/* The point after all insn_chain structures. Used to quickly deallocate
237 memory allocated in copy_reloads during calculate_needs_all_insns. */
238static char *reload_firstobj;
239
240/* This points before all local rtl generated by register elimination.
241 Used to quickly free all memory after processing one insn. */
242static char *reload_insn_firstobj;
243
244/* List of insn_chain instructions, one for every insn that reload needs to
245 examine. */
246struct insn_chain *reload_insn_chain;
247
248/* TRUE if we potentially left dead insns in the insn stream and want to
249 run DCE immediately after reload, FALSE otherwise. */
250static bool need_dce;
251
252/* List of all insns needing reloads. */
253static struct insn_chain *insns_need_reload;
254
255/* This structure is used to record information about register eliminations.
256 Each array entry describes one possible way of eliminating a register
257 in favor of another. If there is more than one way of eliminating a
258 particular register, the most preferred should be specified first. */
259
struct elim_table
{
  int from;			/* Register number to be eliminated. */
  int to;			/* Register number used as replacement. */
  HOST_WIDE_INT initial_offset;	/* Initial difference between values. */
  int can_eliminate;		/* Nonzero if this elimination can be done. */
  int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
				   target hook in previous scan over insns
				   made by reload. */
  HOST_WIDE_INT offset;		/* Current offset between the two regs. */
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM. */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated. */
  rtx to_rtx;			/* REG rtx for the replacement. */
};

/* The live elimination table, one entry per reg_eliminate_1 entry;
   presumably allocated and filled by init_elim_table (declared above) —
   NOTE(review): confirm against that function's definition.  */
static struct elim_table *reg_eliminate = 0;

/* This is an intermediate structure to initialize the table.  It has
   exactly the members provided by ELIMINABLE_REGS. */
static const struct elim_table_1
{
  const int from;
  const int to;
} reg_eliminate_1[] =

  ELIMINABLE_REGS;

/* Number of entries in reg_eliminate_1, and hence the number of
   possible (from, to) elimination pairs the target defines.  */
#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
293
294/* Record the number of pending eliminations that have an offset not equal
295 to their initial offset. If nonzero, we use a new copy of each
296 replacement result in any insns encountered. */
297int num_not_at_initial_offset;
298
299/* Count the number of registers that we may be able to eliminate. */
300static int num_eliminable;
301/* And the number of registers that are equivalent to a constant that
302 can be eliminated to frame_pointer / arg_pointer + constant. */
303static int num_eliminable_invariants;
304
305/* For each label, we record the offset of each elimination. If we reach
306 a label by more than one path and an offset differs, we cannot do the
307 elimination. This information is indexed by the difference of the
308 number of the label and the first label number. We can't offset the
309 pointer itself as this can cause problems on machines with segmented
310 memory. The first table is an array of flags that records whether we
311 have yet encountered a label and the second table is an array of arrays,
312 one entry in the latter array for each elimination. */
313
314static int first_label_num;
315static char *offsets_known_at;
316static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
317
318vec<reg_equivs_t, va_gc> *reg_equivs;
319
320/* Stack of addresses where an rtx has been changed. We can undo the
321 changes by popping items off the stack and restoring the original
322 value at each location.
323
324 We use this simplistic undo capability rather than copy_rtx as copy_rtx
325 will not make a deep copy of a normally sharable rtx, such as
326 (const (plus (symbol_ref) (const_int))). If such an expression appears
327 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
328 rtx expression would be changed. See PR 42431. */
329
330typedef rtx *rtx_p;
331static vec<rtx_p> substitute_stack;
332
333/* Number of labels in the current function. */
334
335static int num_labels;
336
337static void replace_pseudos_in (rtx *, machine_mode, rtx);
338static void maybe_fix_stack_asms (void);
339static void copy_reloads (struct insn_chain *);
340static void calculate_needs_all_insns (int);
341static int find_reg (struct insn_chain *, int);
342static void find_reload_regs (struct insn_chain *);
343static void select_reload_regs (void);
344static void delete_caller_save_insns (void);
345
346static void spill_failure (rtx_insn *, enum reg_class);
347static void count_spilled_pseudo (int, int, int);
348static void delete_dead_insn (rtx_insn *);
349static void alter_reg (int, int, bool);
350static void set_label_offsets (rtx, rtx_insn *, int);
351static void check_eliminable_occurrences (rtx);
352static void elimination_effects (rtx, machine_mode);
353static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
354static int eliminate_regs_in_insn (rtx_insn *, int);
355static void update_eliminable_offsets (void);
356static void mark_not_eliminable (rtx, const_rtx, void *);
357static void set_initial_elim_offsets (void);
358static bool verify_initial_elim_offsets (void);
359static void set_initial_label_offsets (void);
360static void set_offsets_for_label (rtx_insn *);
361static void init_eliminable_invariants (rtx_insn *, bool);
362static void init_elim_table (void);
363static void free_reg_equiv (void);
364static void update_eliminables (HARD_REG_SET *);
365static bool update_eliminables_and_spill (void);
366static void elimination_costs_in_insn (rtx_insn *);
367static void spill_hard_reg (unsigned int, int);
368static int finish_spills (int);
369static void scan_paradoxical_subregs (rtx);
370static void count_pseudo (int);
371static void order_regs_for_reload (struct insn_chain *);
372static void reload_as_needed (int);
373static void forget_old_reloads_1 (rtx, const_rtx, void *);
374static void forget_marked_reloads (regset);
375static int reload_reg_class_lower (const void *, const void *);
376static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
377 machine_mode);
378static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
379 machine_mode);
380static int reload_reg_free_p (unsigned int, int, enum reload_type);
381static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
382 rtx, rtx, int, int);
383static int free_for_value_p (int, machine_mode, int, enum reload_type,
384 rtx, rtx, int, int);
385static int allocate_reload_reg (struct insn_chain *, int, int);
386static int conflicts_with_override (rtx);
387static void failed_reload (rtx_insn *, int);
388static int set_reload_reg (int, int);
389static void choose_reload_regs_init (struct insn_chain *, rtx *);
390static void choose_reload_regs (struct insn_chain *);
391static void emit_input_reload_insns (struct insn_chain *, struct reload *,
392 rtx, int);
393static void emit_output_reload_insns (struct insn_chain *, struct reload *,
394 int);
395static void do_input_reload (struct insn_chain *, struct reload *, int);
396static void do_output_reload (struct insn_chain *, struct reload *, int);
397static void emit_reload_insns (struct insn_chain *);
398static void delete_output_reload (rtx_insn *, int, int, rtx);
399static void delete_address_reloads (rtx_insn *, rtx_insn *);
400static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
401static void inc_for_reload (rtx, rtx, rtx, int);
402static void add_auto_inc_notes (rtx_insn *, rtx);
403static void substitute (rtx *, const_rtx, rtx);
404static bool gen_reload_chain_without_interm_reg_p (int, int);
405static int reloads_conflict (int, int);
406static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
407static rtx_insn *emit_insn_if_valid_for_reload (rtx);
408
409/* Initialize the reload pass. This is called at the beginning of compilation
410 and may be called again if the target is reinitialized. */
411
412void
413init_reload (void)
414{
415 int i;
416
417 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
418 Set spill_indirect_levels to the number of levels such addressing is
419 permitted, zero if it is not permitted at all. */
420
421 rtx tem
422 = gen_rtx_MEM (Pmode,
423 gen_rtx_PLUS (Pmode,
424 gen_rtx_REG (Pmode,
425 LAST_VIRTUAL_REGISTER + 1),
426 gen_int_mode (4, Pmode)));
427 spill_indirect_levels = 0;
428
429 while (memory_address_p (QImode, tem))
430 {
431 spill_indirect_levels++;
432 tem = gen_rtx_MEM (Pmode, tem);
433 }
434
435 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
436
437 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
438 indirect_symref_ok = memory_address_p (QImode, tem);
439
440 /* See if reg+reg is a valid (and offsettable) address. */
441
442 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
443 {
444 tem = gen_rtx_PLUS (Pmode,
445 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
446 gen_rtx_REG (Pmode, i));
447
448 /* This way, we make sure that reg+reg is an offsettable address. */
449 tem = plus_constant (Pmode, tem, 4);
450
451 for (int mode = 0; mode < MAX_MACHINE_MODE; mode++)
452 if (!double_reg_address_ok[mode]
453 && memory_address_p ((enum machine_mode)mode, tem))
454 double_reg_address_ok[mode] = 1;
455 }
456
457 /* Initialize obstack for our rtl allocation. */
458 if (reload_startobj == NULL)
459 {
460 gcc_obstack_init (&reload_obstack);
461 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
462 }
463
464 INIT_REG_SET (&spilled_pseudos);
465 INIT_REG_SET (&changed_allocation_pseudos);
466 INIT_REG_SET (&pseudos_counted);
467}
468
469/* List of insn chains that are currently unused. */
470static struct insn_chain *unused_insn_chains = 0;
471
472/* Allocate an empty insn_chain structure. */
473struct insn_chain *
474new_insn_chain (void)
475{
476 struct insn_chain *c;
477
478 if (unused_insn_chains == 0)
479 {
480 c = XOBNEW (&reload_obstack, struct insn_chain);
481 INIT_REG_SET (&c->live_throughout);
482 INIT_REG_SET (&c->dead_or_set);
483 }
484 else
485 {
486 c = unused_insn_chains;
487 unused_insn_chains = c->next;
488 }
489 c->is_caller_save_insn = 0;
490 c->need_operand_change = 0;
491 c->need_reload = 0;
492 c->need_elim = 0;
493 return c;
494}
495
496/* Small utility function to set all regs in hard reg set TO which are
497 allocated to pseudos in regset FROM. */
498
499void
500compute_use_by_pseudos (HARD_REG_SET *to, regset from)
501{
502 unsigned int regno;
503 reg_set_iterator rsi;
504
505 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
506 {
507 int r = reg_renumber[regno];
508
509 if (r < 0)
510 {
511 /* reload_combine uses the information from DF_LIVE_IN,
512 which might still contain registers that have not
513 actually been allocated since they have an
514 equivalence. */
515 gcc_assert (ira_conflicts_p || reload_completed);
516 }
517 else
518 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
519 }
520}
521
522/* Replace all pseudos found in LOC with their corresponding
523 equivalences. */
524
525static void
526replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
527{
528 rtx x = *loc;
529 enum rtx_code code;
530 const char *fmt;
531 int i, j;
532
533 if (! x)
534 return;
535
536 code = GET_CODE (x);
537 if (code == REG)
538 {
539 unsigned int regno = REGNO (x);
540
541 if (regno < FIRST_PSEUDO_REGISTER)
542 return;
543
544 x = eliminate_regs_1 (x, mem_mode, usage, true, false);
545 if (x != *loc)
546 {
547 *loc = x;
548 replace_pseudos_in (loc, mem_mode, usage);
549 return;
550 }
551
552 if (reg_equiv_constant (regno))
553 *loc = reg_equiv_constant (regno);
554 else if (reg_equiv_invariant (regno))
555 *loc = reg_equiv_invariant (regno);
556 else if (reg_equiv_mem (regno))
557 *loc = reg_equiv_mem (regno);
558 else if (reg_equiv_address (regno))
559 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
560 else
561 {
562 gcc_assert (!REG_P (regno_reg_rtx[regno])
563 || REGNO (regno_reg_rtx[regno]) != regno);
564 *loc = regno_reg_rtx[regno];
565 }
566
567 return;
568 }
569 else if (code == MEM)
570 {
571 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
572 return;
573 }
574
575 /* Process each of our operands recursively. */
576 fmt = GET_RTX_FORMAT (code);
577 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
578 if (*fmt == 'e')
579 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
580 else if (*fmt == 'E')
581 for (j = 0; j < XVECLEN (x, i); j++)
582 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
583}
584
585/* Determine if the current function has an exception receiver block
586 that reaches the exit block via non-exceptional edges */
587
588static bool
589has_nonexceptional_receiver (void)
590{
591 edge e;
592 edge_iterator ei;
593 basic_block *tos, *worklist, bb;
594
595 /* If we're not optimizing, then just err on the safe side. */
596 if (!optimize)
597 return true;
598
599 /* First determine which blocks can reach exit via normal paths. */
600 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
601
602 FOR_EACH_BB_FN (bb, cfun)
603 bb->flags &= ~BB_REACHABLE;
604
605 /* Place the exit block on our worklist. */
606 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
607 *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
608
609 /* Iterate: find everything reachable from what we've already seen. */
610 while (tos != worklist)
611 {
612 bb = *--tos;
613
614 FOR_EACH_EDGE (e, ei, bb->preds)
615 if (!(e->flags & EDGE_ABNORMAL))
616 {
617 basic_block src = e->src;
618
619 if (!(src->flags & BB_REACHABLE))
620 {
621 src->flags |= BB_REACHABLE;
622 *tos++ = src;
623 }
624 }
625 }
626 free (worklist);
627
628 /* Now see if there's a reachable block with an exceptional incoming
629 edge. */
630 FOR_EACH_BB_FN (bb, cfun)
631 if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
632 return true;
633
634 /* No exceptional block reached exit unexceptionally. */
635 return false;
636}
637
638/* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
639 zero elements) to MAX_REG_NUM elements.
640
641 Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */
642void
643grow_reg_equivs (void)
644{
645 int old_size = vec_safe_length (reg_equivs);
646 int max_regno = max_reg_num ();
647 int i;
648 reg_equivs_t ze;
649
650 memset (&ze, 0, sizeof (reg_equivs_t));
651 vec_safe_reserve (reg_equivs, max_regno);
652 for (i = old_size; i < max_regno; i++)
653 reg_equivs->quick_insert (i, ze);
654}
655
656
657/* Global variables used by reload and its subroutines. */
658
659/* The current basic block while in calculate_elim_costs_all_insns. */
660static basic_block elim_bb;
661
662/* Set during calculate_needs if an insn needs register elimination. */
663static int something_needs_elimination;
664/* Set during calculate_needs if an insn needs an operand changed. */
665static int something_needs_operands_changed;
666/* Set by alter_regs if we spilled a register to the stack. */
667static bool something_was_spilled;
668
669/* Nonzero means we couldn't get enough spill regs. */
670static int failure;
671
672/* Temporary array of pseudo-register number. */
673static int *temp_pseudo_reg_arr;
674
675/* If a pseudo has no hard reg, delete the insns that made the equivalence.
676 If that insn didn't set the register (i.e., it copied the register to
677 memory), just delete that insn instead of the equivalencing insn plus
678 anything now dead. If we call delete_dead_insn on that insn, we may
679 delete the insn that actually sets the register if the register dies
680 there and that is incorrect. */
681static void
682remove_init_insns ()
683{
684 for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
685 {
686 if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
687 {
688 rtx list;
689 for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
690 {
691 rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
692
693 /* If we already deleted the insn or if it may trap, we can't
694 delete it. The latter case shouldn't happen, but can
695 if an insn has a variable address, gets a REG_EH_REGION
696 note added to it, and then gets converted into a load
697 from a constant address. */
698 if (NOTE_P (equiv_insn)
699 || can_throw_internal (equiv_insn))
700 ;
701 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
702 delete_dead_insn (equiv_insn);
703 else
704 SET_INSN_DELETED (equiv_insn);
705 }
706 }
707 }
708}
709
710/* Return true if remove_init_insns will delete INSN. */
711static bool
712will_delete_init_insn_p (rtx_insn *insn)
713{
714 rtx set = single_set (insn);
715 if (!set || !REG_P (SET_DEST (set)))
716 return false;
717 unsigned regno = REGNO (SET_DEST (set));
718
719 if (can_throw_internal (insn))
720 return false;
721
722 if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
723 return false;
724
725 for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
726 {
727 rtx equiv_insn = XEXP (list, 0);
728 if (equiv_insn == insn)
729 return true;
730 }
731 return false;
732}
733
734/* Main entry point for the reload pass.
735
736 FIRST is the first insn of the function being compiled.
737
738 GLOBAL nonzero means we were called from global_alloc
739 and should attempt to reallocate any pseudoregs that we
740 displace from hard regs we will use for reloads.
741 If GLOBAL is zero, we do not have enough information to do that,
742 so any pseudo reg that is spilled must go to the stack.
743
744 Return value is TRUE if reload likely left dead insns in the
   stream and a DCE pass should be run to eliminate them.  Else the
746 return value is FALSE. */
747
748bool
749reload (rtx_insn *first, int global)
750{
751 int i, n;
752 rtx_insn *insn;
753 struct elim_table *ep;
754 basic_block bb;
755 bool inserted;
756
757 /* Make sure even insns with volatile mem refs are recognizable. */
758 init_recog ();
759
760 failure = 0;
761
762 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
763
764 /* Make sure that the last insn in the chain
765 is not something that needs reloading. */
766 emit_note (NOTE_INSN_DELETED);
767
768 /* Enable find_equiv_reg to distinguish insns made by reload. */
769 reload_first_uid = get_max_uid ();
770
771 /* Initialize the secondary memory table. */
772 clear_secondary_mem ();
773
774 /* We don't have a stack slot for any spill reg yet. */
775 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
776 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
777
778 /* Initialize the save area information for caller-save, in case some
779 are needed. */
780 init_save_areas ();
781
782 /* Compute which hard registers are now in use
783 as homes for pseudo registers.
784 This is done here rather than (eg) in global_alloc
785 because this point is reached even if not optimizing. */
786 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
787 mark_home_live (i);
788
789 /* A function that has a nonlocal label that can reach the exit
790 block via non-exceptional paths must save all call-saved
791 registers. */
792 if (cfun->has_nonlocal_label
793 && has_nonexceptional_receiver ())
794 crtl->saves_all_registers = 1;
795
796 if (crtl->saves_all_registers)
797 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
798 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
799 df_set_regs_ever_live (i, true);
800
801 /* Find all the pseudo registers that didn't get hard regs
802 but do have known equivalent constants or memory slots.
803 These include parameters (known equivalent to parameter slots)
804 and cse'd or loop-moved constant memory addresses.
805
806 Record constant equivalents in reg_equiv_constant
807 so they will be substituted by find_reloads.
808 Record memory equivalents in reg_mem_equiv so they can
809 be substituted eventually by altering the REG-rtx's. */
810
811 grow_reg_equivs ();
812 reg_old_renumber = XCNEWVEC (short, max_regno);
813 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
814 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
815 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
816
817 CLEAR_HARD_REG_SET (bad_spill_regs_global);
818
819 init_eliminable_invariants (first, true);
820 init_elim_table ();
821
822 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
823 stack slots to the pseudos that lack hard regs or equivalents.
824 Do not touch virtual registers. */
825
826 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
827 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
828 temp_pseudo_reg_arr[n++] = i;
829
830 if (ira_conflicts_p)
831 /* Ask IRA to order pseudo-registers for better stack slot
832 sharing. */
833 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode);
834
835 for (i = 0; i < n; i++)
836 alter_reg (temp_pseudo_reg_arr[i], -1, false);
837
838 /* If we have some registers we think can be eliminated, scan all insns to
839 see if there is an insn that sets one of these registers to something
840 other than itself plus a constant. If so, the register cannot be
841 eliminated. Doing this scan here eliminates an extra pass through the
842 main reload loop in the most common case where register elimination
843 cannot be done. */
844 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
845 if (INSN_P (insn))
846 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
847
848 maybe_fix_stack_asms ();
849
850 insns_need_reload = 0;
851 something_needs_elimination = 0;
852
853 /* Initialize to -1, which means take the first spill register. */
854 last_spill_reg = -1;
855
856 /* Spill any hard regs that we know we can't eliminate. */
857 CLEAR_HARD_REG_SET (used_spill_regs);
858 /* There can be multiple ways to eliminate a register;
859 they should be listed adjacently.
860 Elimination for any register fails only if all possible ways fail. */
861 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
862 {
863 int from = ep->from;
864 int can_eliminate = 0;
865 do
866 {
867 can_eliminate |= ep->can_eliminate;
868 ep++;
869 }
870 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
871 if (! can_eliminate)
872 spill_hard_reg (from, 1);
873 }
874
875 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
876 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
877
878 finish_spills (global);
879
880 /* From now on, we may need to generate moves differently. We may also
881 allow modifications of insns which cause them to not be recognized.
882 Any such modifications will be cleaned up during reload itself. */
883 reload_in_progress = 1;
884
885 /* This loop scans the entire function each go-round
886 and repeats until one repetition spills no additional hard regs. */
887 for (;;)
888 {
889 int something_changed;
890 HOST_WIDE_INT starting_frame_size;
891
892 starting_frame_size = get_frame_size ();
893 something_was_spilled = false;
894
895 set_initial_elim_offsets ();
896 set_initial_label_offsets ();
897
898 /* For each pseudo register that has an equivalent location defined,
899 try to eliminate any eliminable registers (such as the frame pointer)
900 assuming initial offsets for the replacement register, which
901 is the normal case.
902
903 If the resulting location is directly addressable, substitute
904 the MEM we just got directly for the old REG.
905
906 If it is not addressable but is a constant or the sum of a hard reg
907 and constant, it is probably not addressable because the constant is
908 out of range, in that case record the address; we will generate
909 hairy code to compute the address in a register each time it is
910 needed. Similarly if it is a hard register, but one that is not
911 valid as an address register.
912
913 If the location is not addressable, but does not have one of the
914 above forms, assign a stack slot. We have to do this to avoid the
915 potential of producing lots of reloads if, e.g., a location involves
916 a pseudo that didn't get a hard register and has an equivalent memory
917 location that also involves a pseudo that didn't get a hard register.
918
919 Perhaps at some point we will improve reload_when_needed handling
920 so this problem goes away. But that's very hairy. */
921
922 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
923 if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
924 {
925 rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
926 NULL_RTX);
927
928 if (strict_memory_address_addr_space_p
929 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
930 MEM_ADDR_SPACE (x)))
931 reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
932 else if (CONSTANT_P (XEXP (x, 0))
933 || (REG_P (XEXP (x, 0))
934 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
935 || (GET_CODE (XEXP (x, 0)) == PLUS
936 && REG_P (XEXP (XEXP (x, 0), 0))
937 && (REGNO (XEXP (XEXP (x, 0), 0))
938 < FIRST_PSEUDO_REGISTER)
939 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
940 reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
941 else
942 {
943 /* Make a new stack slot. Then indicate that something
944 changed so we go back and recompute offsets for
945 eliminable registers because the allocation of memory
946 below might change some offset. reg_equiv_{mem,address}
947 will be set up for this pseudo on the next pass around
948 the loop. */
949 reg_equiv_memory_loc (i) = 0;
950 reg_equiv_init (i) = 0;
951 alter_reg (i, -1, true);
952 }
953 }
954
955 if (caller_save_needed)
956 setup_save_areas ();
957
958 if (starting_frame_size && crtl->stack_alignment_needed)
959 {
960 /* If we have a stack frame, we must align it now. The
961 stack size may be a part of the offset computation for
962 register elimination. So if this changes the stack size,
963 then repeat the elimination bookkeeping. We don't
964 realign when there is no stack, as that will cause a
965 stack frame when none is needed should
966 TARGET_STARTING_FRAME_OFFSET not be already aligned to
967 STACK_BOUNDARY. */
968 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
969 }
970 /* If we allocated another stack slot, redo elimination bookkeeping. */
971 if (something_was_spilled || starting_frame_size != get_frame_size ())
972 {
973 if (update_eliminables_and_spill ())
974 finish_spills (0);
975 continue;
976 }
977
978 if (caller_save_needed)
979 {
980 save_call_clobbered_regs ();
981 /* That might have allocated new insn_chain structures. */
982 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
983 }
984
985 calculate_needs_all_insns (global);
986
987 if (! ira_conflicts_p)
988 /* Don't do it for IRA. We need this info because we don't
989 change live_throughout and dead_or_set for chains when IRA
990 is used. */
991 CLEAR_REG_SET (&spilled_pseudos);
992
993 something_changed = 0;
994
995 /* If we allocated any new memory locations, make another pass
996 since it might have changed elimination offsets. */
997 if (something_was_spilled || starting_frame_size != get_frame_size ())
998 something_changed = 1;
999
1000 /* Even if the frame size remained the same, we might still have
1001 changed elimination offsets, e.g. if find_reloads called
1002 force_const_mem requiring the back end to allocate a constant
1003 pool base register that needs to be saved on the stack. */
1004 else if (!verify_initial_elim_offsets ())
1005 something_changed = 1;
1006
1007 if (update_eliminables_and_spill ())
1008 {
1009 finish_spills (0);
1010 something_changed = 1;
1011 }
1012 else
1013 {
1014 select_reload_regs ();
1015 if (failure)
1016 goto failed;
1017 if (insns_need_reload)
1018 something_changed |= finish_spills (global);
1019 }
1020
1021 if (! something_changed)
1022 break;
1023
1024 if (caller_save_needed)
1025 delete_caller_save_insns ();
1026
1027 obstack_free (&reload_obstack, reload_firstobj);
1028 }
1029
1030 /* If global-alloc was run, notify it of any register eliminations we have
1031 done. */
1032 if (global)
1033 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1034 if (ep->can_eliminate)
1035 mark_elimination (ep->from, ep->to);
1036
1037 remove_init_insns ();
1038
1039 /* Use the reload registers where necessary
1040 by generating move instructions to move the must-be-register
1041 values into or out of the reload registers. */
1042
1043 if (insns_need_reload != 0 || something_needs_elimination
1044 || something_needs_operands_changed)
1045 {
1046 HOST_WIDE_INT old_frame_size = get_frame_size ();
1047
1048 reload_as_needed (global);
1049
1050 gcc_assert (old_frame_size == get_frame_size ());
1051
1052 gcc_assert (verify_initial_elim_offsets ());
1053 }
1054
1055 /* If we were able to eliminate the frame pointer, show that it is no
   longer live at the start of any basic block.  If it is live by
1057 virtue of being in a pseudo, that pseudo will be marked live
1058 and hence the frame pointer will be known to be live via that
1059 pseudo. */
1060
1061 if (! frame_pointer_needed)
1062 FOR_EACH_BB_FN (bb, cfun)
1063 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1064
1065 /* Come here (with failure set nonzero) if we can't get enough spill
1066 regs. */
1067 failed:
1068
1069 CLEAR_REG_SET (&changed_allocation_pseudos);
1070 CLEAR_REG_SET (&spilled_pseudos);
1071 reload_in_progress = 0;
1072
1073 /* Now eliminate all pseudo regs by modifying them into
1074 their equivalent memory references.
1075 The REG-rtx's for the pseudos are modified in place,
1076 so all insns that used to refer to them now refer to memory.
1077
1078 For a reg that has a reg_equiv_address, all those insns
1079 were changed by reloading so that no insns refer to it any longer;
1080 but the DECL_RTL of a variable decl may refer to it,
1081 and if so this causes the debugging info to mention the variable. */
1082
1083 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1084 {
1085 rtx addr = 0;
1086
1087 if (reg_equiv_mem (i))
1088 addr = XEXP (reg_equiv_mem (i), 0);
1089
1090 if (reg_equiv_address (i))
1091 addr = reg_equiv_address (i);
1092
1093 if (addr)
1094 {
1095 if (reg_renumber[i] < 0)
1096 {
1097 rtx reg = regno_reg_rtx[i];
1098
1099 REG_USERVAR_P (reg) = 0;
1100 PUT_CODE (reg, MEM);
1101 XEXP (reg, 0) = addr;
1102 if (reg_equiv_memory_loc (i))
1103 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1104 else
1105 MEM_ATTRS (reg) = 0;
1106 MEM_NOTRAP_P (reg) = 1;
1107 }
1108 else if (reg_equiv_mem (i))
1109 XEXP (reg_equiv_mem (i), 0) = addr;
1110 }
1111
1112 /* We don't want complex addressing modes in debug insns
1113 if simpler ones will do, so delegitimize equivalences
1114 in debug insns. */
1115 if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0)
1116 {
1117 rtx reg = regno_reg_rtx[i];
1118 rtx equiv = 0;
1119 df_ref use, next;
1120
1121 if (reg_equiv_constant (i))
1122 equiv = reg_equiv_constant (i);
1123 else if (reg_equiv_invariant (i))
1124 equiv = reg_equiv_invariant (i);
1125 else if (reg && MEM_P (reg))
1126 equiv = targetm.delegitimize_address (reg);
1127 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1128 equiv = reg;
1129
1130 if (equiv == reg)
1131 continue;
1132
1133 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1134 {
1135 insn = DF_REF_INSN (use);
1136
1137 /* Make sure the next ref is for a different instruction,
1138 so that we're not affected by the rescan. */
1139 next = DF_REF_NEXT_REG (use);
1140 while (next && DF_REF_INSN (next) == insn)
1141 next = DF_REF_NEXT_REG (next);
1142
1143 if (DEBUG_BIND_INSN_P (insn))
1144 {
1145 if (!equiv)
1146 {
1147 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1148 df_insn_rescan_debug_internal (insn);
1149 }
1150 else
1151 INSN_VAR_LOCATION_LOC (insn)
1152 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1153 reg, equiv);
1154 }
1155 }
1156 }
1157 }
1158
1159 /* We must set reload_completed now since the cleanup_subreg_operands call
1160 below will re-recognize each insn and reload may have generated insns
1161 which are only valid during and after reload. */
1162 reload_completed = 1;
1163
1164 /* Make a pass over all the insns and delete all USEs which we inserted
1165 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1166 notes. Delete all CLOBBER insns, except those that refer to the return
1167 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1168 from misarranging variable-array code, and simplify (subreg (reg))
1169 operands. Strip and regenerate REG_INC notes that may have been moved
1170 around. */
1171
1172 for (insn = first; insn; insn = NEXT_INSN (insn))
1173 if (INSN_P (insn))
1174 {
1175 rtx *pnote;
1176
1177 if (CALL_P (insn))
1178 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1179 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1180
1181 if ((GET_CODE (PATTERN (insn)) == USE
1182 /* We mark with QImode USEs introduced by reload itself. */
1183 && (GET_MODE (insn) == QImode
1184 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1185 || (GET_CODE (PATTERN (insn)) == CLOBBER
1186 && (!MEM_P (XEXP (PATTERN (insn), 0))
1187 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1188 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1189 && XEXP (XEXP (PATTERN (insn), 0), 0)
1190 != stack_pointer_rtx))
1191 && (!REG_P (XEXP (PATTERN (insn), 0))
1192 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1193 {
1194 delete_insn (insn);
1195 continue;
1196 }
1197
1198 /* Some CLOBBERs may survive until here and still reference unassigned
1199 pseudos with const equivalent, which may in turn cause ICE in later
1200 passes if the reference remains in place. */
1201 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1202 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1203 VOIDmode, PATTERN (insn));
1204
1205 /* Discard obvious no-ops, even without -O. This optimization
1206 is fast and doesn't interfere with debugging. */
1207 if (NONJUMP_INSN_P (insn)
1208 && GET_CODE (PATTERN (insn)) == SET
1209 && REG_P (SET_SRC (PATTERN (insn)))
1210 && REG_P (SET_DEST (PATTERN (insn)))
1211 && (REGNO (SET_SRC (PATTERN (insn)))
1212 == REGNO (SET_DEST (PATTERN (insn)))))
1213 {
1214 delete_insn (insn);
1215 continue;
1216 }
1217
1218 pnote = &REG_NOTES (insn);
1219 while (*pnote != 0)
1220 {
1221 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1222 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1223 || REG_NOTE_KIND (*pnote) == REG_INC)
1224 *pnote = XEXP (*pnote, 1);
1225 else
1226 pnote = &XEXP (*pnote, 1);
1227 }
1228
1229 if (AUTO_INC_DEC)
1230 add_auto_inc_notes (insn, PATTERN (insn));
1231
1232 /* Simplify (subreg (reg)) if it appears as an operand. */
1233 cleanup_subreg_operands (insn);
1234
1235 /* Clean up invalid ASMs so that they don't confuse later passes.
1236 See PR 21299. */
1237 if (asm_noperands (PATTERN (insn)) >= 0)
1238 {
1239 extract_insn (insn);
1240 if (!constrain_operands (1, get_enabled_alternatives (insn)))
1241 {
1242 error_for_asm (insn,
1243 "%<asm%> operand has impossible constraints");
1244 delete_insn (insn);
1245 continue;
1246 }
1247 }
1248 }
1249
1250 free (temp_pseudo_reg_arr);
1251
1252 /* Indicate that we no longer have known memory locations or constants. */
1253 free_reg_equiv ();
1254
1255 free (reg_max_ref_mode);
1256 free (reg_old_renumber);
1257 free (pseudo_previous_regs);
1258 free (pseudo_forbidden_regs);
1259
1260 CLEAR_HARD_REG_SET (used_spill_regs);
1261 for (i = 0; i < n_spills; i++)
1262 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1263
1264 /* Free all the insn_chain structures at once. */
1265 obstack_free (&reload_obstack, reload_startobj);
1266 unused_insn_chains = 0;
1267
1268 inserted = fixup_abnormal_edges ();
1269
1270 /* We've possibly turned single trapping insn into multiple ones. */
1271 if (cfun->can_throw_non_call_exceptions)
1272 {
1273 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
1274 bitmap_ones (blocks);
1275 find_many_sub_basic_blocks (blocks);
1276 }
1277
1278 if (inserted)
1279 commit_edge_insertions ();
1280
1281 /* Replacing pseudos with their memory equivalents might have
1282 created shared rtx. Subsequent passes would get confused
1283 by this, so unshare everything here. */
1284 unshare_all_rtl_again (first);
1285
1286#ifdef STACK_BOUNDARY
1287 /* init_emit has set the alignment of the hard frame pointer
1288 to STACK_BOUNDARY. It is very likely no longer valid if
1289 the hard frame pointer was used for register allocation. */
1290 if (!frame_pointer_needed)
1291 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1292#endif
1293
1294 substitute_stack.release ();
1295
1296 gcc_assert (bitmap_empty_p (&spilled_pseudos));
1297
1298 reload_completed = !failure;
1299
1300 return need_dce;
1301}
1302
1303/* Yet another special case. Unfortunately, reg-stack forces people to
1304 write incorrect clobbers in asm statements. These clobbers must not
1305 cause the register to appear in bad_spill_regs, otherwise we'll call
1306 fatal_insn later. We clear the corresponding regnos in the live
1307 register sets to avoid this.
1308 The whole thing is rather sick, I'm afraid. */
1309
static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns are interesting, and only ones whose pattern is a
	 PARALLEL (i.e. that carry clobbers).  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' introduces a comment that extends to the next ','
		     or to the end of the constraint string; skip it.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  /* Address constraints allow the registers usable as a
		     base; all other constraints contribute the class
		     reported by reg_class_for_constraint (NO_REGS for
		     non-register constraints).  */
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1409
1410/* Copy the global variables n_reloads and rld into the corresponding elts
1411 of CHAIN. */
1412static void
1413copy_reloads (struct insn_chain *chain)
1414{
1415 chain->n_reloads = n_reloads;
1416 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1417 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1418 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1419}
1420
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  /* Save the insn's original body, code and notes so that any
	     changes made by elimination below can be undone; the restore
	     happens in the did_elimination block further down.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      /* A no-op is either a pseudo copied onto itself, or a copy
		 between two unassigned pseudos whose memory equivalents
		 are the same location.  */
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Recycle the chain structure.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  /* Append the chain to the insns_need_reload list if any reloads
	     were found.  */
	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  *pprev_reload = 0;
}
1544
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.  */

void
calculate_elim_costs_all_insns (void)
{
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  For those,
		 record the cost of the eliminated form of the source,
		 weighted by the block's execution frequency.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx_insn_list *init = reg_equiv_init (regno);
		  if (init)
		    {
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      machine_mode mode = GET_MODE (SET_DEST (set));
		      int cost = set_src_cost (t, mode,
					       optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Pass the accumulated cost estimates on to IRA.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  /* Release the label-offset tables set up above; this function does not
     run the rest of reload, so they would otherwise leak.  */
  free (reg_equiv_init_cost);
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1642
1643/* Comparison function for qsort to decide which of two reloads
1644 should be handled first. *P1 and *P2 are the reload numbers. */
1645
1646static int
1647reload_reg_class_lower (const void *r1p, const void *r2p)
1648{
1649 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1650 int t;
1651
1652 /* Consider required reloads before optional ones. */
1653 t = rld[r1].optional - rld[r2].optional;
1654 if (t != 0)
1655 return t;
1656
1657 /* Count all solitary classes before non-solitary ones. */
1658 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1659 - (reg_class_size[(int) rld[r1].rclass] == 1));
1660 if (t != 0)
1661 return t;
1662
1663 /* Aside from solitaires, consider all multi-reg groups first. */
1664 t = rld[r2].nregs - rld[r1].nregs;
1665 if (t != 0)
1666 return t;
1667
1668 /* Consider reloads in order of increasing reg-class number. */
1669 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1670 if (t != 0)
1671 return t;
1672
1673 /* If reloads are equally urgent, sort by reload number,
1674 so that the results of qsort leave nothing to chance. */
1675 return r1 - r2;
1676}
1677
/* The cost of spilling each hard reg.  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is only incremented for the first hard reg of a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg.  -1 when no pseudo has been recorded for it.  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1689
1690/* Update the spill cost arrays, considering that pseudo REG is live. */
1691
1692static void
1693count_pseudo (int reg)
1694{
1695 int freq = REG_FREQ (reg);
1696 int r = reg_renumber[reg];
1697 int nregs;
1698
1699 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1700 if (ira_conflicts_p && r < 0)
1701 return;
1702
1703 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1704 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1705 return;
1706
1707 SET_REGNO_REG_SET (&pseudos_counted, reg);
1708
1709 gcc_assert (r >= 0);
1710
1711 spill_add_cost[r] += freq;
1712 nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1713 while (nregs-- > 0)
1714 {
1715 hard_regno_to_pseudo_regno[r + nregs] = reg;
1716 spill_cost[r + nregs] += freq;
1717 }
1718}
1719
1720/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1721 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1722
1723static void
1724order_regs_for_reload (struct insn_chain *chain)
1725{
1726 unsigned i;
1727 HARD_REG_SET used_by_pseudos;
1728 HARD_REG_SET used_by_pseudos2;
1729 reg_set_iterator rsi;
1730
1731 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1732
1733 memset (spill_cost, 0, sizeof spill_cost);
1734 memset (spill_add_cost, 0, sizeof spill_add_cost);
1735 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1736 hard_regno_to_pseudo_regno[i] = -1;
1737
1738 /* Count number of uses of each hard reg by pseudo regs allocated to it
1739 and then order them by decreasing use. First exclude hard registers
1740 that are live in or across this insn. */
1741
1742 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1743 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1744 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1745 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1746
1747 /* Now find out which pseudos are allocated to it, and update
1748 hard_reg_n_uses. */
1749 CLEAR_REG_SET (&pseudos_counted);
1750
1751 EXECUTE_IF_SET_IN_REG_SET
1752 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1753 {
1754 count_pseudo (i);
1755 }
1756 EXECUTE_IF_SET_IN_REG_SET
1757 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1758 {
1759 count_pseudo (i);
1760 }
1761 CLEAR_REG_SET (&pseudos_counted);
1762}
1763
/* Vector of reload-numbers (indices into the rld array) showing the
   order in which the reloads should be processed.  */
static short reload_order[MAX_RELOADS];

/* This is used to keep track of the spill regs used in one insn.  */
static HARD_REG_SET used_spill_regs_local;
1770
1771/* We decided to spill hard register SPILLED, which has a size of
1772 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1773 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1774 update SPILL_COST/SPILL_ADD_COST. */
1775
1776static void
1777count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1778{
1779 int freq = REG_FREQ (reg);
1780 int r = reg_renumber[reg];
1781 int nregs;
1782
1783 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1784 if (ira_conflicts_p && r < 0)
1785 return;
1786
1787 gcc_assert (r >= 0);
1788
1789 nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1790
1791 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1792 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1793 return;
1794
1795 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1796
1797 spill_add_cost[r] -= freq;
1798 while (nregs-- > 0)
1799 {
1800 hard_regno_to_pseudo_regno[r + nregs] = -1;
1801 spill_cost[r + nregs] -= freq;
1802 }
1803}
1804
/* Find reload register to use for reload number ORDER within the insn
   described by CHAIN.  Record the chosen hard register in the reload's
   regno/nregs fields and mark it in used_spill_regs_local.  Return 1
   on success, 0 if no suitable register could be found.  */

static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  /* Hard regs that cannot serve this reload at all.  */
  HARD_REG_SET not_usable;
  /* Hard regs already claimed by earlier reloads that conflict with
     this one.  */
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A register is unusable if it is bad for spilling (locally or
     globally) or not in the reload's register class.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect the hard regs assigned to conflicting reloads that were
     processed before this one.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  /* Scan all hard registers (in allocation order if the target defines
     one) looking for the cheapest usable candidate.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && targetm.hard_regno_mode_ok (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs (regno, rl->mode);

	  /* A multi-register value needs every one of its hard regs to
	     be available; accumulate their additional spill cost.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  Build the -1 terminated list of pseudos
		 currently occupying this candidate's hard regs.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  /* Remember this candidate's pseudo list so later
		     candidates can be compared against it.  */
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Reusing the register that already holds the input or
	     output value is slightly cheaper.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs (best_reg, rl->mode);
  rl->regno = best_reg;

  /* Process every pseudo live through or set/dead in this insn that
     overlaps the chosen hard regs (see count_spilled_pseudo).  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  /* The chosen regs must be free of cost bookkeeping now; claim them
     for this insn.  */
  for (i = 0; i < rl->nregs; i++)
    {
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1945
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.
   On failure to find a register, report it, set the global FAILURE
   flag, and return early.  */

static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
	{
	  chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx);
	  chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx);
	}
      else
	chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy of the chain's reloads in the global rld array,
     which is what find_reg operates on.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
	  && ! rld[r].optional
	  && rld[r].regno == -1)
	if (! find_reg (chain, i))
	  {
	    if (dump_file)
	      fprintf (dump_file, "reload failure for reload %d\n", r);
	    spill_failure (chain->insn, rld[r].rclass);
	    failure = 1;
	    return;
	  }
    }

  /* Record which spill regs this insn used, both per-insn and
     globally, and copy the updated reloads back into the chain.  */
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2011
2012static void
2013select_reload_regs (void)
2014{
2015 struct insn_chain *chain;
2016
2017 /* Try to satisfy the needs for each insn. */
2018 for (chain = insns_need_reload; chain != 0;
2019 chain = chain->next_need_reload)
2020 find_reload_regs (chain);
2021}
2022
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  Each caller-save chain element is unlinked from
   reload_insn_chain, its insn deleted, and the element returned to the
   unused_insn_chains free list.  */
static void
delete_caller_save_insns (void)
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Remove a maximal run of consecutive caller-save elements
	 starting at C.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx_insn *insn = c->insn;

	  /* Keep the list head valid if we delete the first element.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked chain.  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* Recycle the chain element.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
2053
2054/* Handle the failure to find a register to spill.
2055 INSN should be one of the insns which needed this particular spill reg. */
2056
2057static void
2058spill_failure (rtx_insn *insn, enum reg_class rclass)
2059{
2060 if (asm_noperands (PATTERN (insn)) >= 0)
2061 error_for_asm (insn, "can%'t find a register in class %qs while "
2062 "reloading %<asm%>",
2063 reg_class_names[rclass]);
2064 else
2065 {
2066 error ("unable to find a register to spill in class %qs",
2067 reg_class_names[rclass]);
2068
2069 if (dump_file)
2070 {
2071 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2072 debug_reload_to_stream (dump_file);
2073 }
2074 fatal_insn ("this is the insn:", insn);
2075 }
2076}
2077
/* Delete an unneeded INSN and any previous insns who sole purpose is loading
   data that is dead in INSN.  INSN itself is always deleted; a dead
   feeding insn is merely flagged for the post-reload DCE pass.  */

static void
delete_dead_insn (rtx_insn *insn)
{
  rtx_insn *prev = prev_active_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn make
     a note that we want to run DCE immediately after reload.

     We used to delete the previous insn & recurse, but that's wrong for
     block local equivalences.  Instead of trying to figure out the exact
     circumstances where we can delete the potentially dead insns, just
     let DCE do the job.  */
  if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
      && GET_CODE (PATTERN (prev)) == SET
      /* The comma expression extracts the destination and then tests
	 that it is a register.  */
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      /* A source with side effects must not be considered dead.  */
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    need_dce = 1;

  SET_INSN_DELETED (insn);
}
2104
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   If DONT_SHARE_P is true, never reuse or share a stack slot through
   IRA for this pseudo.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      /* The widest mode the pseudo is ever referenced in determines
	 the total size and minimum alignment of the slot.  */
      machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
      unsigned int total_size = GET_MODE_SIZE (wider_mode);
      unsigned int min_align = GET_MODE_BITSIZE (reg_max_ref_mode[i]);
      /* Byte offset applied below to find the start of the value
	 within the slot (big-endian correction).  */
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* IRA gave us a slot to reuse; nothing more to allocate.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (adjust)
		{
		  unsigned int total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocation a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow the requirements to cover the old slot too, so the new
	     slot can replace it for every pseudo that used it.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (partial_subreg_p (mode,
				    GET_MODE (spill_stack_slot[from_reg])))
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		{
		  unsigned int total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      adjust += subreg_size_lowpart_offset (inherent_size, total_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2268
2269/* Mark the slots in regs_ever_live for the hard regs used by
2270 pseudo-reg number REGNO, accessed in MODE. */
2271
2272static void
2273mark_home_live_1 (int regno, machine_mode mode)
2274{
2275 int i, lim;
2276
2277 i = reg_renumber[regno];
2278 if (i < 0)
2279 return;
2280 lim = end_hard_regno (mode, i);
2281 while (i < lim)
2282 df_set_regs_ever_live (i++, true);
2283}
2284
2285/* Mark the slots in regs_ever_live for the hard regs
2286 used by pseudo-reg number REGNO. */
2287
2288void
2289mark_home_live (int regno)
2290{
2291 if (reg_renumber[regno] >= 0)
2292 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2293}
2294
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.

   When an offset recorded for a label disagrees with what we would set
   it to, the corresponding elimination is disabled.  */

static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = label_ref_label (x);

      /* fall through */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      /* The labels live inside the insn's pattern; process them
	 there.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* fall through */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Handle each arm of the conditional branch in turn.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (label_ref_label (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2447
/* This function examines every reg that occurs in X and adjusts the
   costs for its elimination which are gathered by IRA.  INSN is the
   insn in which X occurs.  We do not recurse into MEM expressions.  */

static void
note_reg_elim_costly (const_rtx x, rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (MEM_P (x))
	iter.skip_subrtxes ();
      else if (REG_P (x)
	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
	       && reg_equiv_init (REGNO (x))
	       && reg_equiv_invariant (REGNO (x)))
	{
	  /* Compute the cost of the pseudo's equivalent invariant after
	     elimination, and charge it (scaled by the block's frequency)
	     against keeping the pseudo unallocated.  */
	  rtx t = reg_equiv_invariant (REGNO (x));
	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
	  int cost = set_src_cost (new_rtx, Pmode,
				   optimize_bb_for_speed_p (elim_bb));
	  int freq = REG_FREQ_FROM_BB (elim_bb);

	  if (cost != 0)
	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
	}
    }
}
2477
2478/* Scan X and replace any eliminable registers (such as fp) with a
2479 replacement (such as sp), plus an offset.
2480
2481 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2482 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2483 MEM, we are allowed to replace a sum of a register and the constant zero
2484 with the register, which we cannot do outside a MEM. In addition, we need
2485 to record the fact that a register is referenced outside a MEM.
2486
2487 If INSN is an insn, it is the insn containing X. If we replace a REG
2488 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2489 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2490 the REG is being modified.
2491
2492 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2493 That's used when we eliminate in expressions stored in notes.
2494 This means, do not set ref_outside_mem even if the reference
2495 is outside of MEMs.
2496
2497 If FOR_COSTS is true, we are being called before reload in order to
2498 estimate the costs of keeping registers with an equivalence unallocated.
2499
2500 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2501 replacements done assuming all offsets are at their initial values. If
2502 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2503 encounter, return the actual location so that find_reloads will do
2504 the proper thing. */
2505
2506static rtx
2507eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2508 bool may_use_invariant, bool for_costs)
2509{
2510 enum rtx_code code = GET_CODE (x);
2511 struct elim_table *ep;
2512 int regno;
2513 rtx new_rtx;
2514 int i, j;
2515 const char *fmt;
2516 int copied = 0;
2517
2518 if (! current_function_decl)
2519 return x;
2520
2521 switch (code)
2522 {
2523 CASE_CONST_ANY:
2524 case CONST:
2525 case SYMBOL_REF:
2526 case CODE_LABEL:
2527 case PC:
2528 case CC0:
2529 case ASM_INPUT:
2530 case ADDR_VEC:
2531 case ADDR_DIFF_VEC:
2532 case RETURN:
2533 return x;
2534
2535 case REG:
2536 regno = REGNO (x);
2537
2538 /* First handle the case where we encounter a bare register that
2539 is eliminable. Replace it with a PLUS. */
2540 if (regno < FIRST_PSEUDO_REGISTER)
2541 {
2542 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2543 ep++)
2544 if (ep->from_rtx == x && ep->can_eliminate)
2545 return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2546
2547 }
2548 else if (reg_renumber && reg_renumber[regno] < 0
2549 && reg_equivs
2550 && reg_equiv_invariant (regno))
2551 {
2552 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2553 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2554 mem_mode, insn, true, for_costs);
2555 /* There exists at least one use of REGNO that cannot be
2556 eliminated. Prevent the defining insn from being deleted. */
2557 reg_equiv_init (regno) = NULL;
2558 if (!for_costs)
2559 alter_reg (regno, -1, true);
2560 }
2561 return x;
2562
2563 /* You might think handling MINUS in a manner similar to PLUS is a
2564 good idea. It is not. It has been tried multiple times and every
2565 time the change has had to have been reverted.
2566
2567 Other parts of reload know a PLUS is special (gen_reload for example)
2568 and require special code to handle code a reloaded PLUS operand.
2569
2570 Also consider backends where the flags register is clobbered by a
2571 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2572 lea instruction comes to mind). If we try to reload a MINUS, we
2573 may kill the flags register that was holding a useful value.
2574
2575 So, please before trying to handle MINUS, consider reload as a
2576 whole instead of this little section as well as the backend issues. */
2577 case PLUS:
2578 /* If this is the sum of an eliminable register and a constant, rework
2579 the sum. */
2580 if (REG_P (XEXP (x, 0))
2581 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2582 && CONSTANT_P (XEXP (x, 1)))
2583 {
2584 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2585 ep++)
2586 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2587 {
2588 /* The only time we want to replace a PLUS with a REG (this
2589 occurs when the constant operand of the PLUS is the negative
2590 of the offset) is when we are inside a MEM. We won't want
2591 to do so at other times because that would change the
2592 structure of the insn in a way that reload can't handle.
2593 We special-case the commonest situation in
2594 eliminate_regs_in_insn, so just replace a PLUS with a
2595 PLUS here, unless inside a MEM. */
2596 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2597 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2598 return ep->to_rtx;
2599 else
2600 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2601 plus_constant (Pmode, XEXP (x, 1),
2602 ep->previous_offset));
2603 }
2604
2605 /* If the register is not eliminable, we are done since the other
2606 operand is a constant. */
2607 return x;
2608 }
2609
2610 /* If this is part of an address, we want to bring any constant to the
2611 outermost PLUS. We will do this by doing register replacement in
2612 our operands and seeing if a constant shows up in one of them.
2613
2614 Note that there is no risk of modifying the structure of the insn,
2615 since we only get called for its operands, thus we are either
2616 modifying the address inside a MEM, or something like an address
2617 operand of a load-address insn. */
2618
2619 {
2620 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2621 for_costs);
2622 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2623 for_costs);
2624
2625 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2626 {
2627 /* If one side is a PLUS and the other side is a pseudo that
2628 didn't get a hard register but has a reg_equiv_constant,
2629 we must replace the constant here since it may no longer
2630 be in the position of any operand. */
2631 if (GET_CODE (new0) == PLUS && REG_P (new1)
2632 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2633 && reg_renumber[REGNO (new1)] < 0
2634 && reg_equivs
2635 && reg_equiv_constant (REGNO (new1)) != 0)
2636 new1 = reg_equiv_constant (REGNO (new1));
2637 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2638 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2639 && reg_renumber[REGNO (new0)] < 0
2640 && reg_equiv_constant (REGNO (new0)) != 0)
2641 new0 = reg_equiv_constant (REGNO (new0));
2642
2643 new_rtx = form_sum (GET_MODE (x), new0, new1);
2644
2645 /* As above, if we are not inside a MEM we do not want to
2646 turn a PLUS into something else. We might try to do so here
2647 for an addition of 0 if we aren't optimizing. */
2648 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2649 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2650 else
2651 return new_rtx;
2652 }
2653 }
2654 return x;
2655
2656 case MULT:
2657 /* If this is the product of an eliminable register and a
2658 constant, apply the distribute law and move the constant out
2659 so that we have (plus (mult ..) ..). This is needed in order
2660 to keep load-address insns valid. This case is pathological.
2661 We ignore the possibility of overflow here. */
2662 if (REG_P (XEXP (x, 0))
2663 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2664 && CONST_INT_P (XEXP (x, 1)))
2665 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2666 ep++)
2667 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2668 {
2669 if (! mem_mode
2670 /* Refs inside notes or in DEBUG_INSNs don't count for
2671 this purpose. */
2672 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2673 || GET_CODE (insn) == INSN_LIST
2674 || DEBUG_INSN_P (insn))))
2675 ep->ref_outside_mem = 1;
2676
2677 return
2678 plus_constant (Pmode,
2679 gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2680 ep->previous_offset * INTVAL (XEXP (x, 1)));
2681 }
2682
2683 /* fall through */
2684
2685 case CALL:
2686 case COMPARE:
2687 /* See comments before PLUS about handling MINUS. */
2688 case MINUS:
2689 case DIV: case UDIV:
2690 case MOD: case UMOD:
2691 case AND: case IOR: case XOR:
2692 case ROTATERT: case ROTATE:
2693 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2694 case NE: case EQ:
2695 case GE: case GT: case GEU: case GTU:
2696 case LE: case LT: case LEU: case LTU:
2697 {
2698 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2699 for_costs);
2700 rtx new1 = XEXP (x, 1)
2701 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2702 for_costs) : 0;
2703
2704 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2705 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2706 }
2707 return x;
2708
2709 case EXPR_LIST:
2710 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2711 if (XEXP (x, 0))
2712 {
2713 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2714 for_costs);
2715 if (new_rtx != XEXP (x, 0))
2716 {
2717 /* If this is a REG_DEAD note, it is not valid anymore.
2718 Using the eliminated version could result in creating a
2719 REG_DEAD note for the stack or frame pointer. */
2720 if (REG_NOTE_KIND (x) == REG_DEAD)
2721 return (XEXP (x, 1)
2722 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2723 for_costs)
2724 : NULL_RTX);
2725
2726 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2727 }
2728 }
2729
2730 /* fall through */
2731
2732 case INSN_LIST:
2733 case INT_LIST:
2734 /* Now do eliminations in the rest of the chain. If this was
2735 an EXPR_LIST, this might result in allocating more memory than is
2736 strictly needed, but it simplifies the code. */
2737 if (XEXP (x, 1))
2738 {
2739 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2740 for_costs);
2741 if (new_rtx != XEXP (x, 1))
2742 return
2743 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2744 }
2745 return x;
2746
2747 case PRE_INC:
2748 case POST_INC:
2749 case PRE_DEC:
2750 case POST_DEC:
2751 /* We do not support elimination of a register that is modified.
2752 elimination_effects has already make sure that this does not
2753 happen. */
2754 return x;
2755
2756 case PRE_MODIFY:
2757 case POST_MODIFY:
2758 /* We do not support elimination of a register that is modified.
2759 elimination_effects has already make sure that this does not
2760 happen. The only remaining case we need to consider here is
2761 that the increment value may be an eliminable register. */
2762 if (GET_CODE (XEXP (x, 1)) == PLUS
2763 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2764 {
2765 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2766 insn, true, for_costs);
2767
2768 if (new_rtx != XEXP (XEXP (x, 1), 1))
2769 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2770 gen_rtx_PLUS (GET_MODE (x),
2771 XEXP (x, 0), new_rtx));
2772 }
2773 return x;
2774
2775 case STRICT_LOW_PART:
2776 case NEG: case NOT:
2777 case SIGN_EXTEND: case ZERO_EXTEND:
2778 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2779 case FLOAT: case FIX:
2780 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2781 case ABS:
2782 case SQRT:
2783 case FFS:
2784 case CLZ:
2785 case CTZ:
2786 case POPCOUNT:
2787 case PARITY:
2788 case BSWAP:
2789 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2790 for_costs);
2791 if (new_rtx != XEXP (x, 0))
2792 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2793 return x;
2794
2795 case SUBREG:
2796 /* Similar to above processing, but preserve SUBREG_BYTE.
2797 Convert (subreg (mem)) to (mem) if not paradoxical.
2798 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2799 pseudo didn't get a hard reg, we must replace this with the
2800 eliminated version of the memory location because push_reload
2801 may do the replacement in certain circumstances. */
2802 if (REG_P (SUBREG_REG (x))
2803 && !paradoxical_subreg_p (x)
2804 && reg_equivs
2805 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2806 {
2807 new_rtx = SUBREG_REG (x);
2808 }
2809 else
2810 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2811
2812 if (new_rtx != SUBREG_REG (x))
2813 {
2814 int x_size = GET_MODE_SIZE (GET_MODE (x));
2815 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2816
2817 if (MEM_P (new_rtx)
2818 && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx))
2819 /* On RISC machines, combine can create rtl of the form
2820 (set (subreg:m1 (reg:m2 R) 0) ...)
2821 where m1 < m2, and expects something interesting to
2822 happen to the entire word. Moreover, it will use the
2823 (reg:m2 R) later, expecting all bits to be preserved.
2824 So if the number of words is the same, preserve the
2825 subreg so that push_reload can see it. */
2826 && !(WORD_REGISTER_OPERATIONS
2827 && (x_size - 1) / UNITS_PER_WORD
2828 == (new_size -1 ) / UNITS_PER_WORD))
2829 || x_size == new_size)
2830 )
2831 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2832 else if (insn && GET_CODE (insn) == DEBUG_INSN)
2833 return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2834 else
2835 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2836 }
2837
2838 return x;
2839
2840 case MEM:
2841 /* Our only special processing is to pass the mode of the MEM to our
2842 recursive call and copy the flags. While we are here, handle this
2843 case more efficiently. */
2844
2845 new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2846 for_costs);
2847 if (for_costs
2848 && memory_address_p (GET_MODE (x), XEXP (x, 0))
2849 && !memory_address_p (GET_MODE (x), new_rtx))
2850 note_reg_elim_costly (XEXP (x, 0), insn);
2851
2852 return replace_equiv_address_nv (x, new_rtx);
2853
2854 case USE:
2855 /* Handle insn_list USE that a call to a pure function may generate. */
2856 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2857 for_costs);
2858 if (new_rtx != XEXP (x, 0))
2859 return gen_rtx_USE (GET_MODE (x), new_rtx);
2860 return x;
2861
2862 case CLOBBER:
2863 case ASM_OPERANDS:
2864 gcc_assert (insn && DEBUG_INSN_P (insn));
2865 break;
2866
2867 case SET:
2868 gcc_unreachable ();
2869
2870 default:
2871 break;
2872 }
2873
2874 /* Process each of our operands recursively. If any have changed, make a
2875 copy of the rtx. */
2876 fmt = GET_RTX_FORMAT (code);
2877 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2878 {
2879 if (*fmt == 'e')
2880 {
2881 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2882 for_costs);
2883 if (new_rtx != XEXP (x, i) && ! copied)
2884 {
2885 x = shallow_copy_rtx (x);
2886 copied = 1;
2887 }
2888 XEXP (x, i) = new_rtx;
2889 }
2890 else if (*fmt == 'E')
2891 {
2892 int copied_vec = 0;
2893 for (j = 0; j < XVECLEN (x, i); j++)
2894 {
2895 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2896 for_costs);
2897 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2898 {
2899 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2900 XVEC (x, i)->elem);
2901 if (! copied)
2902 {
2903 x = shallow_copy_rtx (x);
2904 copied = 1;
2905 }
2906 XVEC (x, i) = new_v;
2907 copied_vec = 1;
2908 }
2909 XVECEXP (x, i, j) = new_rtx;
2910 }
2911 }
2912 }
2913
2914 return x;
2915}
2916
2917rtx
2918eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2919{
2920 if (reg_eliminate == NULL)
2921 {
2922 gcc_assert (targetm.no_register_allocation);
2923 return x;
2924 }
2925 return eliminate_regs_1 (x, mem_mode, insn, false, false);
2926}
2927
2928/* Scan rtx X for modifications of elimination target registers. Update
2929 the table of eliminables to reflect the changed state. MEM_MODE is
2930 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2931
static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    /* Leaf codes that cannot affect any elimination.  */
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return;

    case REG:
      regno = REGNO (x);

      /* A use of an eliminable hard register.  The elimination stays
	 possible, but if the use is outside of any MEM, record that
	 fact; eliminate_regs_in_insn disables the elimination if the
	 offset changes while such a reference exists.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		/* VOIDmode is zero, so !mem_mode means "not inside a MEM".  */
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      /* A pseudo that got no hard reg but has a constant equivalence
	 that is not a function invariant: the equivalence itself may
	 mention eliminable registers, so scan it too.  */
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only (plus TARGET (const_int N)) keeps the rule alive;
		   the offset moves by -N.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
      gcc_fallthrough ();
    case STRICT_LOW_PART:
    case NEG: case NOT:
    case SIGN_EXTEND: case ZERO_EXTEND:
    case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT: case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* Same non-paradoxical (subreg (pseudo with memory equivalence))
	 condition that eliminate_regs_1 handles specially; no effects
	 to record for it here.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      /* Both sides of a SET are scanned outside of any MEM context.  */
      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* For any other code, scan every operand and vector element.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3130
3131/* Descend through rtx X and verify that no references to eliminable registers
3132 remain. If any do remain, mark the involved register as not
3133 eliminable. */
3134
3135static void
3136check_eliminable_occurrences (rtx x)
3137{
3138 const char *fmt;
3139 int i;
3140 enum rtx_code code;
3141
3142 if (x == 0)
3143 return;
3144
3145 code = GET_CODE (x);
3146
3147 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3148 {
3149 struct elim_table *ep;
3150
3151 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3152 if (ep->from_rtx == x)
3153 ep->can_eliminate = 0;
3154 return;
3155 }
3156
3157 fmt = GET_RTX_FORMAT (code);
3158 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3159 {
3160 if (*fmt == 'e')
3161 check_eliminable_occurrences (XEXP (x, i));
3162 else if (*fmt == 'E')
3163 {
3164 int j;
3165 for (j = 0; j < XVECLEN (x, i); j++)
3166 check_eliminable_occurrences (XVECEXP (x, i, j));
3167 }
3168 }
3169}
3170
3171/* Scan INSN and eliminate all eliminable registers in it.
3172
3173 If REPLACE is nonzero, do the replacement destructively. Also
3174 delete the insn as dead it if it is setting an eliminable register.
3175
3176 If REPLACE is zero, do all our allocations in reload_obstack.
3177
3178 If no eliminations were done and this insn doesn't require any elimination
3179 processing (these are not identical conditions: it might be updating sp,
3180 but not referencing fp; this needs to be seen during reload_as_needed so
3181 that the offset between fp and sp can be taken into consideration), zero
3182 is returned. Otherwise, 1 is returned. */
3183
static int
eliminate_regs_in_insn (rtx_insn *insn, int replace)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;
  int i;
  rtx substed_operand[MAX_RECOG_OPERANDS];
  rtx orig_operand[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;

  /* Unrecognizable non-asm insns must be USE, CLOBBER, ASM_INPUT or
     debug insns; only debug insns need their location eliminated.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      if (DEBUG_INSN_P (insn))
	INSN_VAR_LOCATION_LOC (insn)
	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
      return 0;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
		&& ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx base = SET_SRC (old_set);
		rtx_insn *base_insn = insn;
		HOST_WIDE_INT offset = 0;

		/* Walk backwards through PLUS-of-constant expressions
		   and single-set defining insns, trying to express the
		   source as TO_RTX plus a constant OFFSET.  */
		while (base != ep->to_rtx)
		  {
		    rtx_insn *prev_insn;
		    rtx prev_set;

		    if (GET_CODE (base) == PLUS
			&& CONST_INT_P (XEXP (base, 1)))
		      {
			offset += INTVAL (XEXP (base, 1));
			base = XEXP (base, 0);
		      }
		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
			     && (prev_set = single_set (prev_insn)) != 0
			     && rtx_equal_p (SET_DEST (prev_set), base))
		      {
			base = SET_SRC (prev_set);
			base_insn = prev_insn;
		      }
		    else
		      break;
		  }

		if (base == ep->to_rtx)
		  {
		    rtx src = plus_constant (Pmode, ep->to_rtx,
					     offset - ep->offset);

		    new_body = old_body;
		    if (! replace)
		      {
			new_body = copy_insn (old_body);
			if (REG_NOTES (insn))
			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		      }
		    PATTERN (insn) = new_body;
		    old_set = single_set (insn);

		    /* First see if this insn remains valid when we
		       make the change.  If not, keep the INSN_CODE
		       the same and let reload fix it up.  */
		    validate_change (insn, &SET_SRC (old_set), src, 1);
		    validate_change (insn, &SET_DEST (old_set),
				     ep->to_rtx, 1);
		    if (! apply_change_group ())
		      {
			SET_SRC (old_set) = src;
			SET_DEST (old_set) = ep->to_rtx;
		      }

		    val = 1;
		    goto done;
		  }
	      }

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      {
		delete_dead_insn (insn);
		return 1;
	      }
	    val = 1;
	    goto done;
	  }
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}

      /* Check that the first operand of the PLUS is a hard reg or
	 the lowpart subreg of one.  */
      if (plus_cst_src)
	{
	  rtx reg = XEXP (plus_cst_src, 0);
	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
	    reg = SUBREG_REG (reg);

	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
	    plus_cst_src = 0;
	}
    }
  if (plus_cst_src)
    {
      rtx reg = XEXP (plus_cst_src, 0);
      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));

      if (GET_CODE (reg) == SUBREG)
	reg = SUBREG_REG (reg);

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == reg && ep->can_eliminate)
	  {
	    rtx to_rtx = ep->to_rtx;
	    offset += ep->offset;
	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));

	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
				    to_rtx);
	    /* If we have a nonzero offset, and the source is already
	       a simple REG, the following transformation would
	       increase the cost of the insn by replacing a simple REG
	       with (plus (reg sp) CST).  So try only when we already
	       had a PLUS before.  */
	    if (offset == 0 || plus_src)
	      {
		rtx new_src = plus_constant (GET_MODE (to_rtx),
					     to_rtx, offset);

		new_body = old_body;
		if (! replace)
		  {
		    new_body = copy_insn (old_body);
		    if (REG_NOTES (insn))
		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		  }
		PATTERN (insn) = new_body;
		old_set = single_set (insn);

		/* First see if this insn remains valid when we make the
		   change.  If not, try to replace the whole pattern with
		   a simple set (this may help if the original insn was a
		   PARALLEL that was only recognized as single_set due to
		   REG_UNUSED notes).  If this isn't valid either, keep
		   the INSN_CODE the same and let reload fix it up.  */
		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
		  {
		    rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);

		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
		      SET_SRC (old_set) = new_src;
		  }
	      }
	    else
	      break;

	    val = 1;
	    /* This can't have an effect on elimination offsets, so skip right
	       to the end.  */
	    goto done;
	  }
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];
      substed_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set
	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  in_plus = false;
	  if (plus_src
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  substed_operand[i]
	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
				replace ? insn : NULL_RTX,
				is_set_src || in_plus, false);
	  if (substed_operand[i] != orig_operand[i])
	    val = 1;
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;

	  /* If an output operand changed from a REG to a MEM and INSN is an
	     insn, write a CLOBBER insn.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i])
	      && MEM_P (substed_operand[i])
	      && replace)
	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Substitute the operands; the new values are in the substed_operand
     array.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = substed_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];

  /* If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.
     If re-recognition fails, the old insn code number will still be used,
     and some register operands may have changed into PLUS expressions.
     These will be handled by find_reloads by loading them into a register
     again.  */

  if (val)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reload swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */
      new_body = old_body;
      if (! replace)
	{
	  new_body = copy_insn (old_body);
	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
	}
      PATTERN (insn) = new_body;

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      /* ??? Why this huge if statement - why don't we just rerecognize the
	 thing always?  */
      if (! insn_is_asm
	  && old_set != 0
	  && ((REG_P (SET_SRC (old_set))
	       && (GET_CODE (new_body) != SET
		   || !REG_P (SET_SRC (new_body))))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_data.operand to the one in the insn.
		 If they are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((MEM_P (SET_SRC (old_set))
		       && SET_SRC (old_set) != recog_data.operand[1])
		      || (MEM_P (SET_DEST (old_set))
			  && SET_DEST (old_set) != recog_data.operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  int new_icode = recog (PATTERN (insn), insn, 0);
	  if (new_icode >= 0)
	    INSN_CODE (insn) = new_icode;
	}
    }

  /* Restore the old body.  If there were any changes to it, we made a copy
     of it while the changes were still in place, so we'll correctly return
     a modified insn below.  */
  if (! replace)
    {
      /* Restore the old body.  */
      for (i = 0; i < recog_data.n_operands; i++)
	/* Restoring a top-level match_parallel would clobber the new_body
	   we installed in the insn.  */
	if (recog_data.operand_loc[i] != &PATTERN (insn))
	  *recog_data.operand_loc[i] = orig_operand[i];
      for (i = 0; i < recog_data.n_dups; i++)
	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
    }

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn)
      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
			  false);

  return val;
}
3587
3588/* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3589 register allocator. INSN is the instruction we need to examine, we perform
3590 eliminations in its operands and record cases where eliminating a reg with
3591 an invariant equivalence would add extra cost. */
3592
3593#pragma GCC diagnostic push
3594#pragma GCC diagnostic warning "-Wmaybe-uninitialized"
static void
elimination_costs_in_insn (rtx_insn *insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* Unrecognizable non-asm insns need no cost analysis; they must be
     USE, CLOBBER, ASM_INPUT or debug insns.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      return;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  int n_dups = recog_data.n_dups;
  for (i = 0; i < n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  int n_operands = recog_data.n_operands;
  for (i = 0; i < n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  /* A set source whose destination is not a plain REG is
	     recorded as a costly place for an eliminated equivalence.  */
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Run the elimination for its cost-recording side effects
	     only (for_costs is true); the resulting rtx is discarded.  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
3744#pragma GCC diagnostic pop
3745
3746/* Loop through all elimination pairs.
3747 Recalculate the number not at initial offset.
3748
3749 Compute the maximum offset (minimum offset if the stack does not
3750 grow downward) for each elimination pair. */
3751
3752static void
3753update_eliminable_offsets (void)
3754{
3755 struct elim_table *ep;
3756
3757 num_not_at_initial_offset = 0;
3758 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3759 {
3760 ep->previous_offset = ep->offset;
3761 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3762 num_not_at_initial_offset++;
3763 }
3764}
3765
3766/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3767 replacement we currently believe is valid, mark it as not eliminable if X
3768 modifies DEST in any way other than by adding a constant integer to it.
3769
3770 If DEST is the frame pointer, we do nothing because we assume that
3771 all assignments to the hard frame pointer are nonlocal gotos and are being
3772 done at a time when they are valid and do not disturb anything else.
3773 Some machines want to eliminate a fake argument pointer with either the
3774 frame or stack pointer. Assignments to the hard frame pointer must not
3775 prevent this elimination.
3776
3777 Called via note_stores from reload before starting its passes to scan
3778 the insns of the function. */
3779
3780static void
3781mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3782{
3783 unsigned int i;
3784
3785 /* A SUBREG of a hard register here is just changing its mode. We should
3786 not see a SUBREG of an eliminable hard register, but check just in
3787 case. */
3788 if (GET_CODE (dest) == SUBREG)
3789 dest = SUBREG_REG (dest);
3790
3791 if (dest == hard_frame_pointer_rtx)
3792 return;
3793
3794 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3795 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3796 && (GET_CODE (x) != SET
3797 || GET_CODE (SET_SRC (x)) != PLUS
3798 || XEXP (SET_SRC (x), 0) != dest
3799 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3800 {
3801 reg_eliminate[i].can_eliminate_previous
3802 = reg_eliminate[i].can_eliminate = 0;
3803 num_eliminable--;
3804 }
3805}
3806
3807/* Verify that the initial elimination offsets did not change since the
3808 last call to set_initial_elim_offsets. This is used to catch cases
3809 where something illegal happened during reload_as_needed that could
3810 cause incorrect code to be generated if we did not check for it. */
3811
3812static bool
3813verify_initial_elim_offsets (void)
3814{
3815 HOST_WIDE_INT t;
3816 struct elim_table *ep;
3817
3818 if (!num_eliminable)
3819 return true;
3820
3821 targetm.compute_frame_layout ();
3822 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3823 {
3824 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3825 if (t != ep->initial_offset)
3826 return false;
3827 }
3828
3829 return true;
3830}
3831
3832/* Reset all offsets on eliminable registers to their initial values. */
3833
3834static void
3835set_initial_elim_offsets (void)
3836{
3837 struct elim_table *ep = reg_eliminate;
3838
3839 targetm.compute_frame_layout ();
3840 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3841 {
3842 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3843 ep->previous_offset = ep->offset = ep->initial_offset;
3844 }
3845
3846 num_not_at_initial_offset = 0;
3847}
3848
/* Subroutine of set_initial_label_offsets called via for_each_eh_label.
   Record LABEL with the initial elimination offsets (the nonzero third
   argument to set_label_offsets marks the offsets as known).  */

static void
set_initial_eh_label_offset (rtx label)
{
  set_label_offsets (label, NULL, 1);
}
3856
3857/* Initialize the known label offsets.
3858 Set a known offset for each forced label to be at the initial offset
3859 of each elimination. We do this because we assume that all
3860 computed jumps occur from a location where each elimination is
3861 at its initial offset.
3862 For all other labels, show that we don't know the offsets. */
3863
3864static void
3865set_initial_label_offsets (void)
3866{
3867 memset (offsets_known_at, 0, num_labels);
3868
3869 unsigned int i;
3870 rtx_insn *insn;
3871 FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
3872 set_label_offsets (insn, NULL, 1);
3873
3874 for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3875 if (x->insn ())
3876 set_label_offsets (x->insn (), NULL, 1);
3877
3878 for_each_eh_label (set_initial_eh_label_offset);
3879}
3880
3881/* Set all elimination offsets to the known values for the code label given
3882 by INSN. */
3883
3884static void
3885set_offsets_for_label (rtx_insn *insn)
3886{
3887 unsigned int i;
3888 int label_nr = CODE_LABEL_NUMBER (insn);
3889 struct elim_table *ep;
3890
3891 num_not_at_initial_offset = 0;
3892 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3893 {
3894 ep->offset = ep->previous_offset
3895 = offsets_at[label_nr - first_label_num][i];
3896 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3897 num_not_at_initial_offset++;
3898 }
3899}
3900
/* See if anything that happened changes which eliminations are valid.
   For example, on the SPARC, whether or not the frame pointer can
   be eliminated can depend on what registers have been used.  We need
   not check some conditions again (such as flag_omit_frame_pointer)
   since they can't have changed.

   Sets bits in *PSET for every hard register that must be spilled
   because an elimination involving it was invalidated; also recomputes
   frame_pointer_needed and num_eliminable as side effects.  */

static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* Re-ask the target about each elimination; also disable any
     elimination of the hard frame pointer if one is now required.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
	|| ! targetm.can_eliminate (ep->from, ep->to)
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* Any surviving elimination of the frame pointer (other than into
	 the hard frame pointer) means the frame pointer is not needed,
	 unless stack realignment forces one.  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      /* An elimination that was valid last time but not now: the FROM
	 register must be spilled, and it no longer counts as
	 eliminable.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
3982
3983/* Call update_eliminables an spill any registers we can't eliminate anymore.
3984 Return true iff a register was spilled. */
3985
3986static bool
3987update_eliminables_and_spill (void)
3988{
3989 int i;
3990 bool did_spill = false;
3991 HARD_REG_SET to_spill;
3992 CLEAR_HARD_REG_SET (to_spill);
3993 update_eliminables (&to_spill);
3994 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
3995
3996 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3997 if (TEST_HARD_REG_BIT (to_spill, i))
3998 {
3999 spill_hard_reg (i, 1);
4000 did_spill = true;
4001
4002 /* Regardless of the state of spills, if we previously had
4003 a register that we thought we could eliminate, but now can
4004 not eliminate, we must run another pass.
4005
4006 Consider pseudos which have an entry in reg_equiv_* which
4007 reference an eliminable register. We must make another pass
4008 to update reg_equiv_* so that we do not substitute in the
4009 old value from when we thought the elimination could be
4010 performed. */
4011 }
4012 return did_spill;
4013}
4014
4015/* Return true if X is used as the target register of an elimination. */
4016
4017bool
4018elimination_target_reg_p (rtx x)
4019{
4020 struct elim_table *ep;
4021
4022 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4023 if (ep->to_rtx == x && ep->can_eliminate)
4024 return true;
4025
4026 return false;
4027}
4028
4029/* Initialize the table of registers to eliminate.
4030 Pre-condition: global flag frame_pointer_needed has been set before
4031 calling this function. */
4032
4033static void
4034init_elim_table (void)
4035{
4036 struct elim_table *ep;
4037 const struct elim_table_1 *ep1;
4038
4039 if (!reg_eliminate)
4040 reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4041
4042 num_eliminable = 0;
4043
4044 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4045 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4046 {
4047 ep->from = ep1->from;
4048 ep->to = ep1->to;
4049 ep->can_eliminate = ep->can_eliminate_previous
4050 = (targetm.can_eliminate (ep->from, ep->to)
4051 && ! (ep->to == STACK_POINTER_REGNUM
4052 && frame_pointer_needed
4053 && (! SUPPORTS_STACK_ALIGNMENT
4054 || ! stack_realign_fp)));
4055 }
4056
4057 /* Count the number of eliminable registers and build the FROM and TO
4058 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
4059 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4060 We depend on this. */
4061 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4062 {
4063 num_eliminable += ep->can_eliminate;
4064 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4065 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4066 }
4067}
4068
/* Find all the pseudo registers that didn't get hard regs
   but do have known equivalent constants or memory slots.
   These include parameters (known equivalent to parameter slots)
   and cse'd or loop-moved constant memory addresses.

   Record constant equivalents in reg_equiv_constant
   so they will be substituted by find_reloads.
   Record memory equivalents in reg_mem_equiv so they can
   be substituted eventually by altering the REG-rtx's.

   FIRST is the head of the insn list; if DO_SUBREGS is true, also
   allocate and fill reg_max_ref_mode from paradoxical subregs.  */

static void
init_eliminable_invariants (rtx_insn *first, bool do_subregs)
{
  int i;
  rtx_insn *insn;

  grow_reg_equivs ();
  /* reg_max_ref_mode is only needed when the caller asked for
     paradoxical-subreg scanning.  */
  if (do_subregs)
    reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno);
  else
    reg_max_ref_mode = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
     to.  If DO_SUBREGS is true, also find all paradoxical subregs and
     find largest such for each pseudo.  FIRST is the head of the insn
     list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      /* Only a single set of a register can carry a usable REG_EQUIV
	 note.  */
      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Hard and virtual registers never get equivalences here.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		       equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* Constant not directly usable: place it in the
			 constant pool and remember the MEM; if that
			 fails, drop the equivalencing insns.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL;
		    }
		}
	      else
		{
		  reg_equiv_init (i) = NULL;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL;
	}
    }

  /* Dump the equivalencing insns recorded for each pseudo.  */
  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4195
4196/* Indicate that we no longer have known memory locations or constants.
4197 Free all data involved in tracking these. */
4198
4199static void
4200free_reg_equiv (void)
4201{
4202 int i;
4203
4204 free (offsets_known_at);
4205 free (offsets_at);
4206 offsets_at = 0;
4207 offsets_known_at = 0;
4208
4209 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4210 if (reg_equiv_alt_mem_list (i))
4211 free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4212 vec_free (reg_equivs);
4213}
4214
/* Kick all pseudos out of hard register REGNO.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In the case, no pseudos
   are allowed to be in the register, even if they are only in a block that
   doesn't require spill registers, unlike the case when we are spilling this
   hard reg to produce another spill register.

   Returns nothing; the displaced pseudos are recorded in the
   spilled_pseudos set.  */

static void
spill_hard_reg (unsigned int regno, int cant_eliminate)
{
  int i;

  if (cant_eliminate)
    {
      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
      df_set_regs_ever_live (regno, true);
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& (unsigned int) reg_renumber[i] <= regno
	/* A multi-register pseudo overlaps REGNO if its first hard reg
	   is at or below REGNO and its last hard reg is above it.  */
	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
      SET_REGNO_REG_SET (&spilled_pseudos, i);
}
4245
/* After spill_hard_reg was called and/or find_reload_regs was run for all
   insns that need reloads, this function is used to actually spill pseudo
   registers and try to reallocate them.  It also sets up the spill_regs
   array for use by choose_reload_regs.

   GLOBAL nonzero means we should attempt to reallocate any pseudo registers
   that we displace from hard registers.

   Returns nonzero if anything changed that requires another reload pass.  */

static int
finish_spills (int global)
{
  struct insn_chain *chain;
  int something_changed = 0;
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	if (num_eliminable && ! df_regs_ever_live_p (i))
	  something_changed = 1;
	df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  /* Kick each spilled pseudo out of its hard register.  */
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
	/* Record the current hard register the pseudo is allocated to
	   in pseudo_previous_regs so we avoid reallocating it to the
	   same hard reg in a later pass.  */
	gcc_assert (reg_renumber[i] >= 0);

	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	if (ira_conflicts_p)
	  /* Inform IRA about the change.  */
	  ira_mark_allocation_change (i);
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	}

      /* Retry allocating the pseudos spilled in IRA and the
	 reload.  For each reg, merge the various reg sets that
	 indicate which hard regs can't be used, and call
	 ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    if (reg_renumber[i] < 0)
	      temp_pseudo_reg_arr[n++] = i;
	    else
	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
	  }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
				bad_spill_regs_global,
				pseudo_forbidden_regs, pseudo_previous_regs,
				&spilled_pseudos))
	something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
	{
	  /* Don't do it for IRA because IRA and the reload still can
	     assign hard registers to the spilled pseudos on next
	     reload iterations.  */
	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
	}
      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
	  /* Value of chain->used_spill_regs from previous iteration
	     may be not included in the value calculated here because
	     of possible removing caller-saves insns (see function
	     delete_caller_save_insns.  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
	}
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
	{
	  if (regno == -1)
	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dump_file, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  return something_changed;
}
4407
4408/* Find all paradoxical subregs within X and update reg_max_ref_mode. */
4409
4410static void
4411scan_paradoxical_subregs (rtx x)
4412{
4413 int i;
4414 const char *fmt;
4415 enum rtx_code code = GET_CODE (x);
4416
4417 switch (code)
4418 {
4419 case REG:
4420 case CONST:
4421 case SYMBOL_REF:
4422 case LABEL_REF:
4423 CASE_CONST_ANY:
4424 case CC0:
4425 case PC:
4426 case USE:
4427 case CLOBBER:
4428 return;
4429
4430 case SUBREG:
4431 if (REG_P (SUBREG_REG (x)))
4432 {
4433 unsigned int regno = REGNO (SUBREG_REG (x));
4434 if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x)))
4435 {
4436 reg_max_ref_mode[regno] = GET_MODE (x);
4437 mark_home_live_1 (regno, GET_MODE (x));
4438 }
4439 }
4440 return;
4441
4442 default:
4443 break;
4444 }
4445
4446 fmt = GET_RTX_FORMAT (code);
4447 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4448 {
4449 if (fmt[i] == 'e')
4450 scan_paradoxical_subregs (XEXP (x, i));
4451 else if (fmt[i] == 'E')
4452 {
4453 int j;
4454 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4455 scan_paradoxical_subregs (XVECEXP (x, i, j));
4456 }
4457 }
4458}
4459
4460/* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4461 If *OP_PTR is a paradoxical subreg, try to remove that subreg
4462 and apply the corresponding narrowing subreg to *OTHER_PTR.
4463 Return true if the operands were changed, false otherwise. */
4464
4465static bool
4466strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4467{
4468 rtx op, inner, other, tem;
4469
4470 op = *op_ptr;
4471 if (!paradoxical_subreg_p (op))
4472 return false;
4473 inner = SUBREG_REG (op);
4474
4475 other = *other_ptr;
4476 tem = gen_lowpart_common (GET_MODE (inner), other);
4477 if (!tem)
4478 return false;
4479
4480 /* If the lowpart operation turned a hard register into a subreg,
4481 rather than simplifying it to another hard register, then the
4482 mode change cannot be properly represented. For example, OTHER
4483 might be valid in its current mode, but not in the new one. */
4484 if (GET_CODE (tem) == SUBREG
4485 && REG_P (other)
4486 && HARD_REGISTER_P (other))
4487 return false;
4488
4489 *op_ptr = inner;
4490 *other_ptr = tem;
4491 return true;
4492}
4493
4494/* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4495 examine all of the reload insns between PREV and NEXT exclusive, and
4496 annotate all that may trap. */
4497
4498static void
4499fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4500{
4501 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4502 if (note == NULL)
4503 return;
4504 if (!insn_could_throw_p (insn))
4505 remove_note (insn, note);
4506 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4507}
4508
4509/* Reload pseudo-registers into hard regs around each insn as needed.
4510 Additional register load insns are output before the insn that needs it
4511 and perhaps store insns after insns that modify the reloaded pseudo reg.
4512
4513 reg_last_reload_reg and reg_reloaded_contents keep track of
4514 which registers are already available in reload registers.
4515 We update these for the reloads that we perform,
4516 as the insns are scanned. */
4517
4518static void
4519reload_as_needed (int live_known)
4520{
4521 struct insn_chain *chain;
4522#if AUTO_INC_DEC
4523 int i;
4524#endif
4525 rtx_note *marker;
4526
4527 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4528 memset (spill_reg_store, 0, sizeof spill_reg_store);
4529 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4530 INIT_REG_SET (&reg_has_output_reload);
4531 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4532 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4533
4534 set_initial_elim_offsets ();
4535
4536 /* Generate a marker insn that we will move around. */
4537 marker = emit_note (NOTE_INSN_DELETED);
4538 unlink_insn_chain (marker, marker);
4539
4540 for (chain = reload_insn_chain; chain; chain = chain->next)
4541 {
4542 rtx_insn *prev = 0;
4543 rtx_insn *insn = chain->insn;
4544 rtx_insn *old_next = NEXT_INSN (insn);
4545#if AUTO_INC_DEC
4546 rtx_insn *old_prev = PREV_INSN (insn);
4547#endif
4548
4549 if (will_delete_init_insn_p (insn))
4550 continue;
4551
4552 /* If we pass a label, copy the offsets from the label information
4553 into the current offsets of each elimination. */
4554 if (LABEL_P (insn))
4555 set_offsets_for_label (insn);
4556
4557 else if (INSN_P (insn))
4558 {
4559 regset_head regs_to_forget;
4560 INIT_REG_SET (&regs_to_forget);
4561 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4562
4563 /* If this is a USE and CLOBBER of a MEM, ensure that any
4564 references to eliminable registers have been removed. */
4565
4566 if ((GET_CODE (PATTERN (insn)) == USE
4567 || GET_CODE (PATTERN (insn)) == CLOBBER)
4568 && MEM_P (XEXP (PATTERN (insn), 0)))
4569 XEXP (XEXP (PATTERN (insn), 0), 0)
4570 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4571 GET_MODE (XEXP (PATTERN (insn), 0)),
4572 NULL_RTX);
4573
4574 /* If we need to do register elimination processing, do so.
4575 This might delete the insn, in which case we are done. */
4576 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4577 {
4578 eliminate_regs_in_insn (insn, 1);
4579 if (NOTE_P (insn))
4580 {
4581 update_eliminable_offsets ();
4582 CLEAR_REG_SET (&regs_to_forget);
4583 continue;
4584 }
4585 }
4586
4587 /* If need_elim is nonzero but need_reload is zero, one might think
4588 that we could simply set n_reloads to 0. However, find_reloads
4589 could have done some manipulation of the insn (such as swapping
4590 commutative operands), and these manipulations are lost during
4591 the first pass for every insn that needs register elimination.
4592 So the actions of find_reloads must be redone here. */
4593
4594 if (! chain->need_elim && ! chain->need_reload
4595 && ! chain->need_operand_change)
4596 n_reloads = 0;
4597 /* First find the pseudo regs that must be reloaded for this insn.
4598 This info is returned in the tables reload_... (see reload.h).
4599 Also modify the body of INSN by substituting RELOAD
4600 rtx's for those pseudo regs. */
4601 else
4602 {
4603 CLEAR_REG_SET (&reg_has_output_reload);
4604 CLEAR_HARD_REG_SET (reg_is_output_reload);
4605
4606 find_reloads (insn, 1, spill_indirect_levels, live_known,
4607 spill_reg_order);
4608 }
4609
4610 if (n_reloads > 0)
4611 {
4612 rtx_insn *next = NEXT_INSN (insn);
4613
4614 /* ??? PREV can get deleted by reload inheritance.
4615 Work around this by emitting a marker note. */
4616 prev = PREV_INSN (insn);
4617 reorder_insns_nobb (marker, marker, prev);
4618
4619 /* Now compute which reload regs to reload them into. Perhaps
4620 reusing reload regs from previous insns, or else output
4621 load insns to reload them. Maybe output store insns too.
4622 Record the choices of reload reg in reload_reg_rtx. */
4623 choose_reload_regs (chain);
4624
4625 /* Generate the insns to reload operands into or out of
4626 their reload regs. */
4627 emit_reload_insns (chain);
4628
4629 /* Substitute the chosen reload regs from reload_reg_rtx
4630 into the insn's body (or perhaps into the bodies of other
4631 load and store insn that we just made for reloading
4632 and that we moved the structure into). */
4633 subst_reloads (insn);
4634
4635 prev = PREV_INSN (marker);
4636 unlink_insn_chain (marker, marker);
4637
4638 /* Adjust the exception region notes for loads and stores. */
4639 if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4640 fixup_eh_region_note (insn, prev, next);
4641
4642 /* Adjust the location of REG_ARGS_SIZE. */
4643 rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4644 if (p)
4645 {
4646 remove_note (insn, p);
4647 fixup_args_size_notes (prev, PREV_INSN (next),
4648 INTVAL (XEXP (p, 0)));
4649 }
4650
4651 /* If this was an ASM, make sure that all the reload insns
4652 we have generated are valid. If not, give an error
4653 and delete them. */
4654 if (asm_noperands (PATTERN (insn)) >= 0)
4655 for (rtx_insn *p = NEXT_INSN (prev);
4656 p != next;
4657 p = NEXT_INSN (p))
4658 if (p != insn && INSN_P (p)
4659 && GET_CODE (PATTERN (p)) != USE
4660 && (recog_memoized (p) < 0
4661 || (extract_insn (p),
4662 !(constrain_operands (1,
4663 get_enabled_alternatives (p))))))
4664 {
4665 error_for_asm (insn,
4666 "%<asm%> operand requires "
4667 "impossible reload");
4668 delete_insn (p);
4669 }
4670 }
4671
4672 if (num_eliminable && chain->need_elim)
4673 update_eliminable_offsets ();
4674
4675 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4676 is no longer validly lying around to save a future reload.
4677 Note that this does not detect pseudos that were reloaded
4678 for this insn in order to be stored in
4679 (obeying register constraints). That is correct; such reload
4680 registers ARE still valid. */
4681 forget_marked_reloads (&regs_to_forget);
4682 CLEAR_REG_SET (&regs_to_forget);
4683
4684 /* There may have been CLOBBER insns placed after INSN. So scan
4685 between INSN and NEXT and use them to forget old reloads. */
4686 for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4687 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4688 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4689
4690#if AUTO_INC_DEC
4691 /* Likewise for regs altered by auto-increment in this insn.
4692 REG_INC notes have been changed by reloading:
4693 find_reloads_address_1 records substitutions for them,
4694 which have been performed by subst_reloads above. */
4695 for (i = n_reloads - 1; i >= 0; i--)
4696 {
4697 rtx in_reg = rld[i].in_reg;
4698 if (in_reg)
4699 {
4700 enum rtx_code code = GET_CODE (in_reg);
4701 /* PRE_INC / PRE_DEC will have the reload register ending up
4702 with the same value as the stack slot, but that doesn't
4703 hold true for POST_INC / POST_DEC. Either we have to
4704 convert the memory access to a true POST_INC / POST_DEC,
4705 or we can't use the reload register for inheritance. */
4706 if ((code == POST_INC || code == POST_DEC)
4707 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4708 REGNO (rld[i].reg_rtx))
4709 /* Make sure it is the inc/dec pseudo, and not
4710 some other (e.g. output operand) pseudo. */
4711 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4712 == REGNO (XEXP (in_reg, 0))))
4713
4714 {
4715 rtx reload_reg = rld[i].reg_rtx;
4716 machine_mode mode = GET_MODE (reload_reg);
4717 int n = 0;
4718 rtx_insn *p;
4719
4720 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4721 {
4722 /* We really want to ignore REG_INC notes here, so
4723 use PATTERN (p) as argument to reg_set_p . */
4724 if (reg_set_p (reload_reg, PATTERN (p)))
4725 break;
4726 n = count_occurrences (PATTERN (p), reload_reg, 0);
4727 if (! n)
4728 continue;
4729 if (n == 1)
4730 {
4731 rtx replace_reg
4732 = gen_rtx_fmt_e (code, mode, reload_reg);
4733
4734 validate_replace_rtx_group (reload_reg,
4735 replace_reg, p);
4736 n = verify_changes (0);
4737
4738 /* We must also verify that the constraints
4739 are met after the replacement. Make sure
4740 extract_insn is only called for an insn
4741 where the replacements were found to be
4742 valid so far. */
4743 if (n)
4744 {
4745 extract_insn (p);
4746 n = constrain_operands (1,
4747 get_enabled_alternatives (p));
4748 }
4749
4750 /* If the constraints were not met, then
4751 undo the replacement, else confirm it. */
4752 if (!n)
4753 cancel_changes (0);
4754 else
4755 confirm_change_group ();
4756 }
4757 break;
4758 }
4759 if (n == 1)
4760 {
4761 add_reg_note (p, REG_INC, reload_reg);
4762 /* Mark this as having an output reload so that the
4763 REG_INC processing code below won't invalidate
4764 the reload for inheritance. */
4765 SET_HARD_REG_BIT (reg_is_output_reload,
4766 REGNO (reload_reg));
4767 SET_REGNO_REG_SET (&reg_has_output_reload,
4768 REGNO (XEXP (in_reg, 0)));
4769 }
4770 else
4771 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4772 NULL);
4773 }
4774 else if ((code == PRE_INC || code == PRE_DEC)
4775 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4776 REGNO (rld[i].reg_rtx))
4777