1/* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
681 happens every time find_reloads is called.
692 happens only when REPLACE is 1, which is only when
70actually doing the reloads, not when just counting them.
71
72Using a reload register for several reloads in one insn:
73
74When an insn has reloads, it is considered as having three parts:
75the input reloads, the insn itself after reloading, and the output reloads.
76Reloads of values used in memory addresses are often needed for only one part.
77
78When this is so, reload_when_needed records which part needs the reload.
79Two reloads for different parts of the insn can share the same reload
80register.
81
82When a reload is used for addresses in multiple parts, or when it is
83an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84a register with any other reload. */
85
86#define REG_OK_STRICT
87
88/* We do not enable this with CHECKING_P, since it is awfully slow. */
89#undef DEBUG_RELOAD
90
91#include "config.h"
92#include "system.h"
93#include "coretypes.h"
94#include "backend.h"
95#include "target.h"
96#include "rtl.h"
97#include "tree.h"
98#include "df.h"
99#include "memmodel.h"
100#include "tm_p.h"
101#include "optabs.h"
102#include "regs.h"
103#include "ira.h"
104#include "recog.h"
105#include "rtl-error.h"
106#include "reload.h"
107#include "addresses.h"
108#include "params.h"
109
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  HIGH
   expressions (whose low part is elsewhere) and constants the target
   reports it cannot force to memory are excluded.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (MODE, X))
117
118/* True if C is a non-empty register class that has too few registers
119 to be safely used as a reload target class. */
120
121static inline bool
122small_register_class_p (reg_class_t rclass)
123{
124 return (reg_class_size [(int) rclass] == 1
125 || (reg_class_size [(int) rclass] >= 1
126 && targetm.class_likely_spilled_p (rclass)));
127}
128
129
130/* All reloads of the current insn are recorded here. See reload.h for
131 comments. */
132int n_reloads;
133struct reload rld[MAX_RELOADS];
134
135/* All the "earlyclobber" operands of the current insn
136 are recorded here. */
137int n_earlyclobbers;
138rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139
140int reload_n_operands;
141
142/* Replacing reloads.
143
144 If `replace_reloads' is nonzero, then as each reload is recorded
145 an entry is made for it in the table `replacements'.
146 Then later `subst_reloads' can look through that table and
147 perform all the replacements needed. */
148
149/* Nonzero means record the places to replace. */
150static int replace_reloads;
151
152/* Each replacement is recorded with a structure like this. */
153struct replacement
154{
155 rtx *where; /* Location to store in */
156 int what; /* which reload this is for */
157 machine_mode mode; /* mode it must have */
158};
159
160static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161
162/* Number of replacements currently recorded. */
163static int n_replacements;
164
165/* Used to track what is modified by an operand. */
166struct decomposition
167{
168 int reg_flag; /* Nonzero if referencing a register. */
169 int safe; /* Nonzero if this can't conflict with anything. */
170 rtx base; /* Base address for MEM. */
171 HOST_WIDE_INT start; /* Starting offset or register number. */
172 HOST_WIDE_INT end; /* Ending offset or register number. */
173};
174
175/* Save MEMs needed to copy from one class of registers to another. One MEM
176 is used per mode, but normally only one or two modes are ever used.
177
178 We keep two versions, before and after register elimination. The one
   after register elimination is recorded separately for each operand.  This
180 is done in case the address is not valid to be sure that we separately
181 reload each. */
182
183static rtx secondary_memlocs[NUM_MACHINE_MODES];
184static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
185static int secondary_memlocs_elim_used = 0;
186
187/* The instruction we are doing reloads for;
188 so we can test whether a register dies in it. */
189static rtx_insn *this_insn;
190
191/* Nonzero if this instruction is a user-specified asm with operands. */
192static int this_insn_is_asm;
193
194/* If hard_regs_live_known is nonzero,
195 we can tell which hard regs are currently live,
196 at least enough to succeed in choosing dummy reloads. */
197static int hard_regs_live_known;
198
199/* Indexed by hard reg number,
200 element is nonnegative if hard reg has been spilled.
201 This vector is passed to `find_reloads' as an argument
202 and is not changed here. */
203static short *static_reload_reg_p;
204
205/* Set to 1 in subst_reg_equivs if it changes anything. */
206static int subst_reg_equivs_changed;
207
208/* On return from push_reload, holds the reload-number for the OUT
209 operand, which can be different for that from the input operand. */
210static int output_reloadnum;
211
 /* Compare two RTX's for the purpose of reload sharing: true if they are
    the same rtx, the same register number, or equal side-effect-free
    expressions.  */
#define MATCHES(x, y) \
 (x == y || (x != 0 && (REG_P (x)				\
			? REG_P (y) && REGNO (x) == REGNO (y)	\
			: rtx_equal_p (x, y) && ! side_effects_p (x))))

 /* Indicates if two reloads purposes are for similar enough things that we
    can merge their reloads.  RELOAD_OTHER merges with anything; otherwise
    the purposes must match exactly (with equal operand numbers) or both
    be one of the always-mergeable kinds listed below.  */
#define MERGABLE_RELOADS(when1, when2, op1, op2) \
  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
   || ((when1) == (when2) && (op1) == (op2))		\
   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
   || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
       && (when2) == RELOAD_FOR_OTHER_ADDRESS))

 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged:
    i.e. they are mergeable but not for the same thing, so the combined
    reload must be usable for the whole insn.  */
#define MERGE_TO_OTHER(when1, when2, op1, op2) \
  ((when1) != (when2)					\
   || ! ((op1) == (op2)					\
	 || (when1) == RELOAD_FOR_INPUT			\
	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))

 /* If we are going to reload an address, compute the reload type to
    use: the corresponding address-of-address type for input/output
    address reloads, otherwise TYPE unchanged.  */
#define ADDR_TYPE(type)				\
  ((type) == RELOAD_FOR_INPUT_ADDRESS		\
   ? RELOAD_FOR_INPADDR_ADDRESS			\
   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS	\
      ? RELOAD_FOR_OUTADDR_ADDRESS		\
      : (type)))
245
246static int push_secondary_reload (int, rtx, int, int, enum reg_class,
247 machine_mode, enum reload_type,
248 enum insn_code *, secondary_reload_info *);
249static enum reg_class find_valid_class (machine_mode, machine_mode,
250 int, unsigned int);
251static void push_replacement (rtx *, int, machine_mode);
252static void dup_replacements (rtx *, rtx *);
253static void combine_reloads (void);
254static int find_reusable_reload (rtx *, rtx, enum reg_class,
255 enum reload_type, int, int);
256static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
257 machine_mode, reg_class_t, int, int);
258static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
259static struct decomposition decompose (rtx);
260static int immune_p (rtx, rtx, struct decomposition);
261static bool alternative_allows_const_pool_ref (rtx, const char *, int);
262static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
263 rtx_insn *, int *);
264static rtx make_memloc (rtx, int);
265static int maybe_memory_address_addr_space_p (machine_mode, rtx,
266 addr_space_t, rtx *);
267static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
268 int, enum reload_type, int, rtx_insn *);
269static rtx subst_reg_equivs (rtx, rtx_insn *);
270static rtx subst_indexed_address (rtx);
271static void update_auto_inc_notes (rtx_insn *, int, int);
272static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
273 enum rtx_code, enum rtx_code, rtx *,
274 int, enum reload_type,int, rtx_insn *);
275static void find_reloads_address_part (rtx, rtx *, enum reg_class,
276 machine_mode, int,
277 enum reload_type, int);
278static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
279 int, rtx_insn *, int *);
280static void copy_replacements_1 (rtx *, rtx *, int);
281static int find_inc_amount (rtx, rtx);
282static int refers_to_mem_for_reload_p (rtx);
283static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
284 rtx, rtx *);
285
286/* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
287 list yet. */
288
289static void
290push_reg_equiv_alt_mem (int regno, rtx mem)
291{
292 rtx it;
293
294 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
295 if (rtx_equal_p (XEXP (it, 0), mem))
296 return;
297
298 reg_equiv_alt_mem_list (regno)
299 = alloc_EXPR_LIST (REG_EQUIV, mem,
300 reg_equiv_alt_mem_list (regno));
301}
302
/* Determine if any secondary reloads are needed for loading (if IN_P is
   nonzero) or storing (if IN_P is zero) X to or from a reload register of
   register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
   are needed, push them.

   OPNUM and TYPE identify the operand the primary reload is for, and
   OPTIONAL is nonzero if that reload is optional.  PREV_SRI links to the
   secondary_reload_info of the reload we are a secondary for, if any.

   Return the reload number of the secondary reload we made, or -1 if
   we didn't need one.  *PICODE is set to the insn_code to use if we do
   need a secondary reload.  */

static int
push_secondary_reload (int in_p, rtx x, int opnum, int optional,
		       enum reg_class reload_class,
		       machine_mode reload_mode, enum reload_type type,
		       enum insn_code *picode, secondary_reload_info *prev_sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class scratch_class;
  machine_mode mode = reload_mode;
  enum insn_code icode = CODE_FOR_nothing;
  enum insn_code t_icode = CODE_FOR_nothing;
  enum reload_type secondary_type;
  int s_reload, t_reload = -1;
  const char *scratch_constraint;
  secondary_reload_info sri;

  /* Address-type reloads keep their type; for anything else the
     secondary acts as an address reload of the matching direction.  */
  if (type == RELOAD_FOR_INPUT_ADDRESS
      || type == RELOAD_FOR_OUTPUT_ADDRESS
      || type == RELOAD_FOR_INPADDR_ADDRESS
      || type == RELOAD_FOR_OUTADDR_ADDRESS)
    secondary_type = type;
  else
    secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;

  *picode = CODE_FOR_nothing;

  /* If X is a paradoxical SUBREG, use the inner value to determine both the
     mode and object being reloaded.  */
  if (paradoxical_subreg_p (x))
    {
      x = SUBREG_REG (x);
      reload_mode = GET_MODE (x);
    }

  /* If X is a pseudo-register that has an equivalent MEM (actually, if it
     is still a pseudo-register by now, it *must* have an equivalent MEM
     but we don't want to assume that), use that equivalent when seeing if
     a secondary reload is needed since whether or not a reload is needed
     might be sensitive to the form of the MEM.  */

  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_equiv_mem (REGNO (x)))
    x = reg_equiv_mem (REGNO (x));

  /* Ask the target which secondary resources are needed.  */
  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = prev_sri;
  rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
						      reload_mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If we don't need any secondary registers, done.  */
  if (rclass == NO_REGS && icode == CODE_FOR_nothing)
    return -1;

  /* An intermediate register may itself need a tertiary reload;
     push that recursively first.  */
  if (rclass != NO_REGS)
    t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
				      reload_mode, type, &t_icode, &sri);

  /* If we will be using an insn, the secondary reload is for a
     scratch register.  */

  if (icode != CODE_FOR_nothing)
    {
      /* If IN_P is nonzero, the reload register will be the output in
	 operand 0.  If IN_P is zero, the reload register will be the input
	 in operand 1.  Outputs should have an initial "=", which we must
	 skip.  */

      /* ??? It would be useful to be able to handle only two, or more than
	 three, operands, but for now we can only handle the case of having
	 exactly three: output, input and one temp/scratch.  */
      gcc_assert (insn_data[(int) icode].n_operands == 3);

      /* ??? We currently have no way to represent a reload that needs
	 an icode to reload from an intermediate tertiary reload register.
	 We should probably have a new field in struct reload to tag a
	 chain of scratch operand reloads onto.  */
      gcc_assert (rclass == NO_REGS);

      /* Extract the class of the scratch operand from its constraint,
	 skipping the "=" (and any "&" earlyclobber marker).  */
      scratch_constraint = insn_data[(int) icode].operand[2].constraint;
      gcc_assert (*scratch_constraint == '=');
      scratch_constraint++;
      if (*scratch_constraint == '&')
	scratch_constraint++;
      scratch_class = (reg_class_for_constraint
		       (lookup_constraint (scratch_constraint)));

      rclass = scratch_class;
      mode = insn_data[(int) icode].operand[2].mode;
    }

  /* This case isn't valid, so fail.  Reload is allowed to use the same
     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
     in the case of a secondary register, we actually need two different
     registers for correct code.  We fail here to prevent the possibility of
     silently generating incorrect code later.

     The convention is that secondary input reloads are valid only if the
     secondary_class is different from class.  If you have such a case, you
     cannot use secondary reloads, you must work around the problem some
     other way.

     Allow this when a reload_in/out pattern is being used.  I.e. assume
     that the generated code handles this case.  */

  gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
	      || t_icode != CODE_FOR_nothing);

  /* See if we can reuse an existing secondary reload: the classes must
     nest, the mode, tertiary reload and tertiary icode must agree, and
     the purposes must be mergeable.  */
  for (s_reload = 0; s_reload < n_reloads; s_reload++)
    if (rld[s_reload].secondary_p
	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
	&& ((in_p && rld[s_reload].inmode == mode)
	    || (! in_p && rld[s_reload].outmode == mode))
	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
			     opnum, rld[s_reload].opnum))
      {
	if (in_p)
	  rld[s_reload].inmode = mode;
	if (! in_p)
	  rld[s_reload].outmode = mode;

	/* Narrow the merged reload's class if ours is a subset.  */
	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
	  rld[s_reload].rclass = rclass;

	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
	rld[s_reload].optional &= optional;
	rld[s_reload].secondary_p = 1;
	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
			    opnum, rld[s_reload].opnum))
	  rld[s_reload].when_needed = RELOAD_OTHER;

	break;
      }

  if (s_reload == n_reloads)
    {
      /* If we need a memory location to copy between the two reload regs,
	 set it up now.  Note that we do the input case before making
	 the reload and the output case after.  This is due to the
	 way reloads are output.  */

      if (in_p && icode == CODE_FOR_nothing
	  && targetm.secondary_memory_needed (mode, rclass, reload_class))
	{
	  get_secondary_mem (x, reload_mode, opnum, type);

	  /* We may have just added new reloads.  Make sure we add
	     the new reload at the end.  */
	  s_reload = n_reloads;
	}

      /* We need to make a new secondary reload for this register class.  */
      rld[s_reload].in = rld[s_reload].out = 0;
      rld[s_reload].rclass = rclass;

      rld[s_reload].inmode = in_p ? mode : VOIDmode;
      rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
      rld[s_reload].reg_rtx = 0;
      rld[s_reload].optional = optional;
      rld[s_reload].inc = 0;
      /* Maybe we could combine these, but it seems too tricky.  */
      rld[s_reload].nocombine = 1;
      rld[s_reload].in_reg = 0;
      rld[s_reload].out_reg = 0;
      rld[s_reload].opnum = opnum;
      rld[s_reload].when_needed = secondary_type;
      rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
      rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
      rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_out_icode
	= ! in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_p = 1;

      n_reloads++;

      if (! in_p && icode == CODE_FOR_nothing
	  && targetm.secondary_memory_needed (mode, reload_class, rclass))
	get_secondary_mem (x, mode, opnum, type);
    }

  *picode = icode;
  return s_reload;
}
503
504/* If a secondary reload is needed, return its class. If both an intermediate
505 register and a scratch register is needed, we return the class of the
506 intermediate register. */
507reg_class_t
508secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
509 rtx x)
510{
511 enum insn_code icode;
512 secondary_reload_info sri;
513
514 sri.icode = CODE_FOR_nothing;
515 sri.prev_sri = NULL;
516 rclass
517 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
518 icode = (enum insn_code) sri.icode;
519
520 /* If there are no secondary reloads at all, we return NO_REGS.
521 If an intermediate register is needed, we return its class. */
522 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
523 return rclass;
524
525 /* No intermediate register is needed, but we have a special reload
526 pattern, which we assume for now needs a scratch register. */
527 return scratch_reload_class (icode);
528}
529
530/* ICODE is the insn_code of a reload pattern. Check that it has exactly
531 three operands, verify that operand 2 is an output operand, and return
532 its register class.
533 ??? We'd like to be able to handle any pattern with at least 2 operands,
534 for zero or more scratch registers, but that needs more infrastructure. */
535enum reg_class
536scratch_reload_class (enum insn_code icode)
537{
538 const char *scratch_constraint;
539 enum reg_class rclass;
540
541 gcc_assert (insn_data[(int) icode].n_operands == 3);
542 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
543 gcc_assert (*scratch_constraint == '=');
544 scratch_constraint++;
545 if (*scratch_constraint == '&')
546 scratch_constraint++;
547 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
548 gcc_assert (rclass != NO_REGS);
549 return rclass;
550}
551
/* Return a memory location that will be used to copy X in mode MODE.
   If we haven't already made a location for this mode in this insn,
   call find_reloads_address on the location being returned.
   OPNUM and TYPE describe the reload the location is for.  */

rtx
get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
		   int opnum, enum reload_type type)
{
  rtx loc;
  int mem_valid;

  /* By default, if MODE is narrower than a word, widen it to a word.
     This is required because most machines that require these memory
     locations do not support short load and stores from all registers
     (e.g., FP registers).  */

  mode = targetm.secondary_memory_needed_mode (mode);

  /* If we already have made a MEM for this operand in MODE, return it.  */
  if (secondary_memlocs_elim[(int) mode][opnum] != 0)
    return secondary_memlocs_elim[(int) mode][opnum];

  /* If this is the first time we've tried to get a MEM for this mode,
     allocate a new one.  `something_changed' in reload will get set
     by noticing that the frame size has changed.  */

  if (secondary_memlocs[(int) mode] == 0)
    {
#ifdef SECONDARY_MEMORY_NEEDED_RTX
      /* The target supplies its own location.  */
      secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
#else
      secondary_memlocs[(int) mode]
	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
#endif
    }

  /* Get a version of the address doing any eliminations needed.  If that
     didn't give us a new MEM, make a new one if it isn't valid.  */

  loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
  mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
						  MEM_ADDR_SPACE (loc));

  /* Copy before mutating so the shared pre-elimination MEM stays
     intact.  */
  if (! mem_valid && loc == secondary_memlocs[(int) mode])
    loc = copy_rtx (loc);

  /* The only time the call below will do anything is if the stack
     offset is too large.  In that case IND_LEVELS doesn't matter, so we
     can just pass a zero.  Adjust the type to be the address of the
     corresponding object.  If the address was valid, save the eliminated
     address.  If it wasn't valid, we need to make a reload each time, so
     don't save it.  */

  if (! mem_valid)
    {
      type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
	      : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
	      : RELOAD_OTHER);

      find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
			    opnum, type, 0, 0);
    }

  /* Record the per-operand, post-elimination location and track the
     high-water mark of modes used so it can be cleared later.  */
  secondary_memlocs_elim[(int) mode][opnum] = loc;
  if (secondary_memlocs_elim_used <= (int)mode)
    secondary_memlocs_elim_used = (int)mode + 1;
  return loc;
}
620
621/* Clear any secondary memory locations we've made. */
622
623void
624clear_secondary_mem (void)
625{
626 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
627}
628
629
630/* Find the largest class which has at least one register valid in
631 mode INNER, and which for every such register, that register number
632 plus N is also valid in OUTER (if in range) and is cheap to move
633 into REGNO. Such a class must exist. */
634
635static enum reg_class
636find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
637 machine_mode inner ATTRIBUTE_UNUSED, int n,
638 unsigned int dest_regno ATTRIBUTE_UNUSED)
639{
640 int best_cost = -1;
641 int rclass;
642 int regno;
643 enum reg_class best_class = NO_REGS;
644 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
645 unsigned int best_size = 0;
646 int cost;
647
648 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
649 {
650 int bad = 0;
651 int good = 0;
652 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
653 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
654 {
655 if (targetm.hard_regno_mode_ok (regno, inner))
656 {
657 good = 1;
658 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
659 && !targetm.hard_regno_mode_ok (regno + n, outer))
660 bad = 1;
661 }
662 }
663
664 if (bad || !good)
665 continue;
666 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
667
668 if ((reg_class_size[rclass] > best_size
669 && (best_cost < 0 || best_cost >= cost))
670 || best_cost > cost)
671 {
672 best_class = (enum reg_class) rclass;
673 best_size = reg_class_size[rclass];
674 best_cost = register_move_cost (outer, (enum reg_class) rclass,
675 dest_class);
676 }
677 }
678
679 gcc_assert (best_size != 0);
680
681 return best_class;
682}
683
684/* We are trying to reload a subreg of something that is not a register.
685 Find the largest class which contains only registers valid in
686 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
687 which we would eventually like to obtain the object. */
688
689static enum reg_class
690find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 machine_mode mode ATTRIBUTE_UNUSED,
692 enum reg_class dest_class ATTRIBUTE_UNUSED)
693{
694 int best_cost = -1;
695 int rclass;
696 int regno;
697 enum reg_class best_class = NO_REGS;
698 unsigned int best_size = 0;
699 int cost;
700
701 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
702 {
703 unsigned int computed_rclass_size = 0;
704
705 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
706 {
707 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 && targetm.hard_regno_mode_ok (regno, mode))
709 computed_rclass_size++;
710 }
711
712 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
713
714 if ((computed_rclass_size > best_size
715 && (best_cost < 0 || best_cost >= cost))
716 || best_cost > cost)
717 {
718 best_class = (enum reg_class) rclass;
719 best_size = computed_rclass_size;
720 best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 dest_class);
722 }
723 }
724
725 gcc_assert (best_size != 0);
726
727#ifdef LIMIT_RELOAD_CLASS
728 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729#endif
730 return best_class;
731}
732
/* Return the number of a previously made reload that can be combined with
   a new one, or n_reloads if none of the existing reloads can be used.
   OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
   push_reload, they determine the kind of the new reload that we try to
   combine.  P_IN points to the corresponding value of IN, which can be
   modified by this function.
   DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */

static int
find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
		      enum reload_type type, int opnum, int dont_share)
{
  rtx in = *p_in;
  int i;

  /* We can't merge two reloads if the output of either one is
     earlyclobbered.  */

  if (earlyclobber_operand_p (out))
    return n_reloads;

  /* We can use an existing reload if the class is right
     and at least one of IN and OUT is a match
     and the other is at worst neutral.
     (A zero compared against anything is neutral.)

     For targets with small register classes, don't use existing reloads
     unless they are for the same thing since that can cause us to need
     more reload registers than we otherwise would.  */

  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	/* Either IN matches (and OUT is compatible or absent), or OUT
	   matches (and IN is compatible or absent).  */
	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
	    || (out != 0 && MATCHES (rld[i].out, out)
		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
      return i;

  /* Reloading a plain reg for input can match a reload to postincrement
     that reg, since the postincrement's value is the right value.
     Likewise, it can match a preincrement reload, since we regard
     the preincrementation as happening before any ref in this insn
     to that register.  */
  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our
	   class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
	/* One side must be a plain register and the other an autoinc
	   of that same register.  */
	&& ((REG_P (in)
	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
	     && MATCHES (XEXP (rld[i].in, 0), in))
	    || (REG_P (rld[i].in)
		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
		&& MATCHES (XEXP (in, 0), rld[i].in)))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed,
			     opnum, rld[i].opnum))
      {
	/* Make sure reload_in ultimately has the increment,
	   not the plain register.  */
	if (REG_P (in))
	  *p_in = rld[i].in;
	return i;
      }
  return n_reloads;
}
813
814/* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
815 expression. MODE is the mode that X will be used in. OUTPUT is true if
816 the function is invoked for the output part of an enclosing reload. */
817
818static bool
819reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
820{
821 rtx inner;
822
823 /* Only SUBREGs are problematical. */
824 if (GET_CODE (x) != SUBREG)
825 return false;
826
827 inner = SUBREG_REG (x);
828
829 /* If INNER is a constant or PLUS, then INNER will need reloading. */
830 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
831 return true;
832
833 /* If INNER is not a hard register, then INNER will not need reloading. */
834 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
835 return false;
836
837 /* If INNER is not ok for MODE, then INNER will need reloading. */
838 if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
839 return true;
840
841 /* If this is for an output, and the outer part is a word or smaller,
842 INNER is larger than a word and the number of registers in INNER is
843 not the same as the number of words in INNER, then INNER will need
844 reloading (with an in-out reload). */
845 return (output
846 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
847 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
848 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
849 != REG_NREGS (inner)));
850}
851
852/* Return nonzero if IN can be reloaded into REGNO with mode MODE without
853 requiring an extra reload register. The caller has already found that
854 IN contains some reference to REGNO, so check that we can produce the
855 new value in a single step. E.g. if we have
856 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
857 instruction that adds one to a register, this should succeed.
858 However, if we have something like
859 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
860 needs to be loaded into a register first, we need a separate reload
861 register.
862 Such PLUS reloads are generated by find_reload_address_part.
863 The out-of-range PLUS expressions are usually introduced in the instruction
864 patterns by register elimination and substituting pseudos without a home
865 by their function-invariant equivalences. */
866static int
867can_reload_into (rtx in, int regno, machine_mode mode)
868{
869 rtx dst;
870 rtx_insn *test_insn;
871 int r = 0;
872 struct recog_data_d save_recog_data;
873
874 /* For matching constraints, we often get notional input reloads where
875 we want to use the original register as the reload register. I.e.
876 technically this is a non-optional input-output reload, but IN is
877 already a valid register, and has been chosen as the reload register.
878 Speed this up, since it trivially works. */
879 if (REG_P (in))
880 return 1;
881
882 /* To test MEMs properly, we'd have to take into account all the reloads
883 that are already scheduled, which can become quite complicated.
884 And since we've already handled address reloads for this MEM, it
885 should always succeed anyway. */
886 if (MEM_P (in))
887 return 1;
888
889 /* If we can make a simple SET insn that does the job, everything should
890 be fine. */
891 dst = gen_rtx_REG (mode, regno);
892 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
893 save_recog_data = recog_data;
894 if (recog_memoized (test_insn) >= 0)
895 {
896 extract_insn (test_insn);
897 r = constrain_operands (1, get_enabled_alternatives (test_insn));
898 }
899 recog_data = save_recog_data;
900 return r;
901}
902
903/* Record one reload that needs to be performed.
904 IN is an rtx saying where the data are to be found before this instruction.
905 OUT says where they must be stored after the instruction.
906 (IN is zero for data not read, and OUT is zero for data not written.)
907 INLOC and OUTLOC point to the places in the instructions where
908 IN and OUT were found.
909 If IN and OUT are both nonzero, it means the same register must be used
910 to reload both IN and OUT.
911
912 RCLASS is a register class required for the reloaded data.
913 INMODE is the machine mode that the instruction requires
914 for the reg that replaces IN and OUTMODE is likewise for OUT.
915
916 If IN is zero, then OUT's location and mode should be passed as
917 INLOC and INMODE.
918
919 STRICT_LOW is the 1 if there is a containing STRICT_LOW_PART rtx.
920
921 OPTIONAL nonzero means this reload does not need to be performed:
922 it can be discarded if that is more convenient.
923
924 OPNUM and TYPE say what the purpose of this reload is.
925
926 The return value is the reload-number for this reload.
927
928 If both IN and OUT are nonzero, in some rare cases we might
929 want to make two separate reloads. (Actually we never do this now.)
930 Therefore, the reload-number for OUT is stored in
931 output_reloadnum when we return; the return value applies to IN.
932 Usually (presently always), when IN and OUT are nonzero,
933 the two reload-numbers are equal, but the caller should be careful to
934 distinguish them. */
935
int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
	     enum reg_class rclass, machine_mode inmode,
	     machine_mode outmode, int strict_low, int optional,
	     int opnum, enum reload_type type)
{
  int i;
  int dont_share = 0;
  int dont_remove_subreg = 0;
#ifdef LIMIT_RELOAD_CLASS
  rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
#endif
  int secondary_in_reload = -1, secondary_out_reload = -1;
  enum insn_code secondary_in_icode = CODE_FOR_nothing;
  enum insn_code secondary_out_icode = CODE_FOR_nothing;
  enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
  subreg_in_class = NO_REGS;

  /* INMODE and/or OUTMODE could be VOIDmode if no mode
     has been specified for the operand.  In that case,
     use the operand's mode as the mode to reload.  */
  if (inmode == VOIDmode && in != 0)
    inmode = GET_MODE (in);
  if (outmode == VOIDmode && out != 0)
    outmode = GET_MODE (out);

  /* If find_reloads and friends until now missed to replace a pseudo
     with a constant of reg_equiv_constant something went wrong
     beforehand.
     Note that it can't simply be done here if we missed it earlier
     since the constant might need to be pushed into the literal pool
     and the resulting memref would probably need further
     reloading.  */
  if (in != 0 && REG_P (in))
    {
      int regno = REGNO (in);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* reg_equiv_constant only contains constants which are obviously
     not appropriate as destination.  So if we would need to replace
     the destination pseudo with a constant we are in real
     trouble.  */
  if (out != 0 && REG_P (out))
    {
      int regno = REGNO (out);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* If we have a read-write operand with an address side-effect,
     change either IN or OUT so the side-effect happens only once.  */
  if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
    switch (GET_CODE (XEXP (in, 0)))
      {
      case POST_INC: case POST_DEC: case POST_MODIFY:
	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
	break;

      case PRE_INC: case PRE_DEC: case PRE_MODIFY:
	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
	break;

      default:
	break;
      }

  /* If we are reloading a (SUBREG constant ...), really reload just the
     inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
     If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
     a pseudo and hence will become a MEM) with M1 wider than M2 and the
     register is a pseudo, also reload the inside expression.
     For machines that extend byte loads, do this for any SUBREG of a pseudo
     where both M1 and M2 are a word or smaller, M1 is wider than M2, and
     M2 is an integral mode that gets extended when loaded.
     Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.
     (However, if OUT is nonzero, we need to reload the reg *and*
     the subreg, so do nothing here, and let following statement handle it.)

     Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
     we can't handle it here because CONST_INT does not indicate a mode.

     Similarly, we must reload the inside expression if we have a
     STRICT_LOW_PART (presumably, in == out in this case).

     Also reload the inner expression if it does not require a secondary
     reload but the SUBREG does.

     Finally, reload the inner expression if it is a register that is in
     the class whose registers cannot be referenced in a different size
     and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
     cannot reload just the inside since we might end up with the wrong
     register class.  But if it is inside a STRICT_LOW_PART, we have
     no choice, so we hope we do get the right register class there.  */

  scalar_int_mode inner_mode;
  if (in != 0 && GET_CODE (in) == SUBREG
      && (subreg_lowpart_p (in) || strict_low)
      && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
					inmode, rclass)
      && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
      && (CONSTANT_P (SUBREG_REG (in))
	  || GET_CODE (SUBREG_REG (in)) == PLUS
	  || strict_low
	  || (((REG_P (SUBREG_REG (in))
		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (in)))
	      && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
		  || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		      && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (in)),
						 &inner_mode)
		      && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
		      && paradoxical_subreg_p (inmode, inner_mode)
		      && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
		  || (WORD_REGISTER_OPERATIONS
		      && partial_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
		      && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
			   / UNITS_PER_WORD)))))
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      /* The case where out is nonzero
		 is handled differently in the following statement.  */
	      && (out == 0 || subreg_lowpart_p (in))
	      && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		   && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
		       > UNITS_PER_WORD)
		   && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			/ UNITS_PER_WORD)
		       != REG_NREGS (SUBREG_REG (in))))
		  || !targetm.hard_regno_mode_ok (subreg_regno (in), inmode)))
	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
					  SUBREG_REG (in))
		  == NO_REGS))
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
					 GET_MODE (SUBREG_REG (in)), inmode))))
    {
#ifdef LIMIT_RELOAD_CLASS
      in_subreg_loc = inloc;
#endif
      inloc = &SUBREG_REG (in);
      in = *inloc;

      if (!WORD_REGISTER_OPERATIONS
	  && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
	  && MEM_P (in))
	/* This is supposed to happen only for paradoxical subregs made by
	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
	gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));

      inmode = GET_MODE (in);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where M1 is not valid for R if it was not handled by the code above.

     Similar issue for (SUBREG constant ...) if it was not handled by the
     code above.  This can happen if SUBREG_BYTE != 0.

     However, we must reload the inner reg *as well as* the subreg in
     that case.  */

  if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
    {
      if (REG_P (SUBREG_REG (in)))
	subreg_in_class
	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
						   GET_MODE (SUBREG_REG (in)),
						   SUBREG_BYTE (in),
						   GET_MODE (in)),
			      REGNO (SUBREG_REG (in)));
      else if (CONSTANT_P (SUBREG_REG (in))
	       || GET_CODE (SUBREG_REG (in)) == PLUS)
	subreg_in_class = find_valid_class_1 (inmode,
					      GET_MODE (SUBREG_REG (in)),
					      rclass);

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for input reloads of type RELOAD_OTHER in the same
	 order as the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output before the outer reload.  */
      push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
      dont_remove_subreg = 1;
    }

  /* Similarly for paradoxical and problematical SUBREGs on the output.
     Note that there is no reason we need worry about the previous value
     of SUBREG_REG (out); even if wider than out, storing in a subreg is
     entitled to clobber it all (except in the case of a word mode subreg
     or of a STRICT_LOW_PART, in that latter case the constraint should
     label it input-output.)  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && (subreg_lowpart_p (out) || strict_low)
      && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
					outmode, rclass)
      && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
      && (CONSTANT_P (SUBREG_REG (out))
	  || strict_low
	  || (((REG_P (SUBREG_REG (out))
		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (out)))
	      && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
		  || (WORD_REGISTER_OPERATIONS
		      && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
		      && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
			   / UNITS_PER_WORD)))))
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      /* The case of a word mode subreg
		 is handled differently in the following statement.  */
	      && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
			> UNITS_PER_WORD))
	      && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
					  SUBREG_REG (out))
		  == NO_REGS))
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
					 GET_MODE (SUBREG_REG (out)),
					 outmode))))
    {
#ifdef LIMIT_RELOAD_CLASS
      out_subreg_loc = outloc;
#endif
      outloc = &SUBREG_REG (out);
      out = *outloc;
      gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
		  || GET_MODE_SIZE (GET_MODE (out))
		     <= GET_MODE_SIZE (outmode));
      outmode = GET_MODE (out);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.

     However, we must reload the inner reg *as well as* the subreg in
     that case and the inner reg is an in-out reload.  */

  if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
    {
      enum reg_class in_out_class
	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
						 GET_MODE (SUBREG_REG (out)),
						 SUBREG_BYTE (out),
						 GET_MODE (out)),
			    REGNO (SUBREG_REG (out)));

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for output reloads of type RELOAD_OTHER in reverse
	 order of the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output after the outer reload.  */
      push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
		   0, 0, opnum, RELOAD_OTHER);
      dont_remove_subreg = 1;
    }

  /* If IN appears in OUT, we can't share any input-only reload for IN.  */
  if (in != 0 && out != 0 && MEM_P (out)
      && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
      && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
    dont_share = 1;

  /* If IN is a SUBREG of a hard register, make a new REG.  This
     simplifies some of the cases below.  */

  if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));

  /* Similarly for OUT.  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && REG_P (SUBREG_REG (out))
      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));

  /* Narrow down the class of register wanted if that is
     desirable on this machine for efficiency.  */
  {
    reg_class_t preferred_class = rclass;

    if (in != 0)
      preferred_class = targetm.preferred_reload_class (in, rclass);

    /* Output reloads may need analogous treatment, different in detail.  */
    if (out != 0)
      preferred_class
	= targetm.preferred_output_reload_class (out, preferred_class);

    /* Discard what the target said if we cannot do it.  */
    if (preferred_class != NO_REGS
	|| (optional && type == RELOAD_FOR_OUTPUT))
      rclass = (enum reg_class) preferred_class;
  }

  /* Make sure we use a class that can handle the actual pseudo
     inside any subreg.  For example, on the 386, QImode regs
     can appear within SImode subregs.  Although GENERAL_REGS
     can handle SImode, QImode needs a smaller class.  */
#ifdef LIMIT_RELOAD_CLASS
  if (in_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
  else if (in != 0 && GET_CODE (in) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);

  if (out_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
  if (out != 0 && GET_CODE (out) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
#endif

  /* Verify that this class is at least possible for the mode that
     is specified.  */
  if (this_insn_is_asm)
    {
      machine_mode mode;
      if (paradoxical_subreg_p (inmode, outmode))
	mode = inmode;
      else
	mode = outmode;
      if (mode == VOIDmode)
	{
	  error_for_asm (this_insn, "cannot reload integer constant "
			 "operand in %<asm%>");
	  mode = word_mode;
	  if (in != 0)
	    inmode = word_mode;
	  if (out != 0)
	    outmode = word_mode;
	}
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (targetm.hard_regno_mode_ok (i, mode)
	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
	  break;
      if (i == FIRST_PSEUDO_REGISTER)
	{
	  error_for_asm (this_insn, "impossible register constraint "
			 "in %<asm%>");
	  /* Avoid further trouble with this insn.  */
	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
	  /* We used to continue here setting class to ALL_REGS, but it triggers
	     sanity check on i386 for:
	     void foo(long double d)
	     {
	       asm("" :: "a" (d));
	     }
	     Returning zero here ought to be safe as we take care in
	     find_reloads to not process the reloads when instruction was
	     replaced by USE.  */

	  return 0;
	}
    }

  /* Optional output reloads are always OK even if we have no register class,
     since the function of these reloads is only to have spill_reg_store etc.
     set, so that the storing insn can be deleted later.  */
  gcc_assert (rclass != NO_REGS
	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));

  /* See whether an already-recorded reload of the same value with a
     compatible class, mode and type can be reused; if none can,
     find_reusable_reload returns n_reloads and we create a new entry.  */
  i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);

  if (i == n_reloads)
    {
      /* See if we need a secondary reload register to move between CLASS
	 and IN or CLASS and OUT.  Get the icode and push any required reloads
	 needed for each of them if so.  */

      if (in != 0)
	secondary_in_reload
	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
				   &secondary_in_icode, NULL);
      if (out != 0 && GET_CODE (out) != SCRATCH)
	secondary_out_reload
	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
				   type, &secondary_out_icode, NULL);

      /* We found no existing reload suitable for re-use.
	 So add an additional reload.  */

      /* If IN is a hard register (or a SUBREG of one) and no inner
	 reload chose a class above, base the secondary-memory check
	 below on the register's own class.  */
      if (subreg_in_class == NO_REGS
	  && in != 0
	  && (REG_P (in)
	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
      /* If a memory location is needed for the copy, make one.  */
      if (subreg_in_class != NO_REGS
	  && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
	get_secondary_mem (in, inmode, opnum, type);

      i = n_reloads;
      rld[i].in = in;
      rld[i].out = out;
      rld[i].rclass = rclass;
      rld[i].inmode = inmode;
      rld[i].outmode = outmode;
      rld[i].reg_rtx = 0;
      rld[i].optional = optional;
      rld[i].inc = 0;
      rld[i].nocombine = 0;
      rld[i].in_reg = inloc ? *inloc : 0;
      rld[i].out_reg = outloc ? *outloc : 0;
      rld[i].opnum = opnum;
      rld[i].when_needed = type;
      rld[i].secondary_in_reload = secondary_in_reload;
      rld[i].secondary_out_reload = secondary_out_reload;
      rld[i].secondary_in_icode = secondary_in_icode;
      rld[i].secondary_out_icode = secondary_out_icode;
      rld[i].secondary_p = 0;

      n_reloads++;

      if (out != 0
	  && (REG_P (out)
	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	  && (targetm.secondary_memory_needed
	      (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
	get_secondary_mem (out, outmode, opnum, type);
    }
  else
    {
      /* We are reusing an existing reload,
	 but we may have additional information for it.
	 For example, we may now have both IN and OUT
	 while the old one may have just one of them.  */

      /* The modes can be different.  If they are, we want to reload in
	 the larger mode, so that the value is valid for both modes.  */
      if (inmode != VOIDmode
	  && partial_subreg_p (rld[i].inmode, inmode))
	rld[i].inmode = inmode;
      if (outmode != VOIDmode
	  && partial_subreg_p (rld[i].outmode, outmode))
	rld[i].outmode = outmode;
      if (in != 0)
	{
	  rtx in_reg = inloc ? *inloc : 0;
	  /* If we merge reloads for two distinct rtl expressions that
	     are identical in content, there might be duplicate address
	     reloads.  Remove the extra set now, so that if we later find
	     that we can inherit this reload, we can get rid of the
	     address reloads altogether.

	     Do not do this if both reloads are optional since the result
	     would be an optional reload which could potentially leave
	     unresolved address replacements.

	     It is not sufficient to call transfer_replacements since
	     choose_reload_regs will remove the replacements for address
	     reloads of inherited reloads which results in the same
	     problem.  */
	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
	      && ! (rld[i].optional && optional))
	    {
	      /* We must keep the address reload with the lower operand
		 number alive.  */
	      if (opnum > rld[i].opnum)
		{
		  remove_address_replacements (in);
		  in = rld[i].in;
		  in_reg = rld[i].in_reg;
		}
	      else
		remove_address_replacements (rld[i].in);
	    }
	  /* When emitting reloads we don't necessarily look at the in-
	     and outmode, but also directly at the operands (in and out).
	     So we can't simply overwrite them with whatever we have found
	     for this (to-be-merged) reload, we have to "merge" that too.
	     Reusing another reload already verified that we deal with the
	     same operands, just possibly in different modes.  So we
	     overwrite the operands only when the new mode is larger.
	     See also PR33613.  */
	  if (!rld[i].in
	      || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
	    rld[i].in = in;
	  if (!rld[i].in_reg
	      || (in_reg
		  && partial_subreg_p (GET_MODE (rld[i].in_reg),
				       GET_MODE (in_reg))))
	    rld[i].in_reg = in_reg;
	}
      if (out != 0)
	{
	  if (!rld[i].out
	      || (out
		  && partial_subreg_p (GET_MODE (rld[i].out),
				       GET_MODE (out))))
	    rld[i].out = out;
	  if (outloc
	      && (!rld[i].out_reg
		  || partial_subreg_p (GET_MODE (rld[i].out_reg),
				       GET_MODE (*outloc))))
	    rld[i].out_reg = *outloc;
	}
      if (reg_class_subset_p (rclass, rld[i].rclass))
	rld[i].rclass = rclass;
      rld[i].optional &= optional;
      if (MERGE_TO_OTHER (type, rld[i].when_needed,
			  opnum, rld[i].opnum))
	rld[i].when_needed = RELOAD_OTHER;
      rld[i].opnum = MIN (rld[i].opnum, opnum);
    }

  /* If the ostensible rtx being reloaded differs from the rtx found
     in the location to substitute, this reload is not safe to combine
     because we cannot reliably tell whether it appears in the insn.  */

  if (in != 0 && in != *inloc)
    rld[i].nocombine = 1;

#if 0
  /* This was replaced by changes in find_reloads_address_1 and the new
     function inc_for_reload, which go with a new meaning of reload_inc.  */

  /* If this is an IN/OUT reload in an insn that sets the CC,
     it must be for an autoincrement.  It doesn't work to store
     the incremented value after the insn because that would clobber the CC.
     So we must do the increment of the value reloaded from,
     increment it, store it back, then decrement again.  */
  if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
    {
      out = 0;
      rld[i].out = 0;
      rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
      /* If we did not find a nonzero amount-to-increment-by,
	 that contradicts the belief that IN is being incremented
	 in an address in this insn.  */
      gcc_assert (rld[i].inc != 0);
    }
#endif

  /* If we will replace IN and OUT with the reload-reg,
     record where they are located so that substitution need
     not do a tree walk.  */

  if (replace_reloads)
    {
      if (inloc != 0)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = i;
	  r->where = inloc;
	  r->mode = inmode;
	}
      if (outloc != 0 && outloc != inloc)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = i;
	  r->where = outloc;
	  r->mode = outmode;
	}
    }

  /* If this reload is just being introduced and it has both
     an incoming quantity and an outgoing quantity that are
     supposed to be made to match, see if either one of the two
     can serve as the place to reload into.

     If one of them is acceptable, set rld[i].reg_rtx
     to that one.  */

  if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
    {
      rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
					  inmode, outmode,
					  rld[i].rclass, i,
					  earlyclobber_operand_p (out));

      /* If the outgoing register already contains the same value
	 as the incoming one, we can dispense with loading it.
	 The easiest way to tell the caller that is to give a phony
	 value for the incoming operand (same as outgoing one).  */
      if (rld[i].reg_rtx == out
	  && (REG_P (in) || CONSTANT_P (in))
	  && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
				  static_reload_reg_p, i, inmode))
	rld[i].in = out;
    }

  /* If this is an input reload and the operand contains a register that
     dies in this insn and is used nowhere else, see if it is the right class
     to be used for this reload.  Use it if so.  (This occurs most commonly
     in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
     this if it is also an output reload that mentions the register unless
     the output is a SUBREG that clobbers an entire register.

     Note that the operand might be one of the spill regs, if it is a
     pseudo reg and we are in a block where spilling has not taken place.
     But if there is no spilling in this block, that is OK.
     An explicitly used hard reg cannot be a spill reg.  */

  if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
    {
      rtx note;
      int regno;
      machine_mode rel_mode = inmode;

      if (out && partial_subreg_p (rel_mode, outmode))
	rel_mode = outmode;

      for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD
	    && REG_P (XEXP (note, 0))
	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	    && reg_mentioned_p (XEXP (note, 0), in)
	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
				    ORIGINAL_REGNO (XEXP (note, 0)))
		    && REG_NREGS (XEXP (note, 0)) == 1))
	    && ! refers_to_regno_for_reload_p (regno,
					       end_hard_regno (rel_mode,
							       regno),
					       PATTERN (this_insn), inloc)
	    && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
	    /* If this is also an output reload, IN cannot be used as
	       the reload register if it is set in this insn unless IN
	       is also OUT.  */
	    && (out == 0 || in == out
		|| ! hard_reg_set_here_p (regno,
					  end_hard_regno (rel_mode, regno),
					  PATTERN (this_insn)))
	    /* ??? Why is this code so different from the previous?
	       Is there any simple coherent way to describe the two together?
	       What's going on here.  */
	    && (in != out
		|| (GET_CODE (in) == SUBREG
		    && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
			 / UNITS_PER_WORD)
			== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	    /* Make sure the operand fits in the reg that dies.  */
	    && (GET_MODE_SIZE (rel_mode)
		<= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
	    && targetm.hard_regno_mode_ok (regno, inmode)
	    && targetm.hard_regno_mode_ok (regno, outmode))
	  {
	    unsigned int offs;
	    unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
				      hard_regno_nregs (regno, outmode));

	    /* Reject the dying register if any of the hard registers it
	       occupies is fixed or outside the wanted class.  */
	    for (offs = 0; offs < nregs; offs++)
	      if (fixed_regs[regno + offs]
		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
					  regno + offs))
		break;

	    if (offs == nregs
		&& (! (refers_to_regno_for_reload_p
		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
		    || can_reload_into (in, regno, inmode)))
	      {
		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
		break;
	      }
	  }
    }

  /* Report through the file-scope variable which reload number handles
     OUT, as described in the function comment above.  */
  if (out)
    output_reloadnum = i;

  return i;
}
1624
1625/* Record an additional place we must replace a value
1626 for which we have already recorded a reload.
1627 RELOADNUM is the value returned by push_reload
1628 when the reload was recorded.
1629 This is used in insn patterns that use match_dup. */
1630
1631static void
1632push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1633{
1634 if (replace_reloads)
1635 {
1636 struct replacement *r = &replacements[n_replacements++];
1637 r->what = reloadnum;
1638 r->where = loc;
1639 r->mode = mode;
1640 }
1641}
1642
1643/* Duplicate any replacement we have recorded to apply at
1644 location ORIG_LOC to also be performed at DUP_LOC.
1645 This is used in insn patterns that use match_dup. */
1646
1647static void
1648dup_replacements (rtx *dup_loc, rtx *orig_loc)
1649{
1650 int i, n = n_replacements;
1651
1652 for (i = 0; i < n; i++)
1653 {
1654 struct replacement *r = &replacements[i];
1655 if (r->where == orig_loc)
1656 push_replacement (dup_loc, r->what, r->mode);
1657 }
1658}
1659
1660/* Transfer all replacements that used to be in reload FROM to be in
1661 reload TO. */
1662
1663void
1664transfer_replacements (int to, int from)
1665{
1666 int i;
1667
1668 for (i = 0; i < n_replacements; i++)
1669 if (replacements[i].what == from)
1670 replacements[i].what = to;
1671}
1672
/* IN_RTX is the value loaded by a reload that we now decided to inherit,
   or a subpart of it.  If we have any replacements registered for IN_RTX,
   cancel the reloads that were supposed to load them.
   Return nonzero if we canceled any reloads.  */
int
remove_address_replacements (rtx in_rtx)
{
  int i, j;
  /* Per-reload flag bits: bit 0 set if some replacement for that reload
     lies inside IN_RTX, bit 1 set if some replacement for it survives.
     Only reloads whose value is exactly 1 (all replacements inside
     IN_RTX, none surviving) are canceled below.  */
  char reload_flags[MAX_RELOADS];
  int something_changed = 0;

  memset (reload_flags, 0, sizeof reload_flags);
  /* Compact the replacements array in place, dropping entries whose
     location is inside IN_RTX and keeping the rest.  */
  for (i = 0, j = 0; i < n_replacements; i++)
    {
      if (loc_mentioned_in_p (replacements[i].where, in_rtx))
	reload_flags[replacements[i].what] |= 1;
      else
	{
	  replacements[j++] = replacements[i];
	  reload_flags[replacements[i].what] |= 2;
	}
    }
  /* Note that the following store must be done before the recursive calls.  */
  n_replacements = j;

  for (i = n_reloads - 1; i >= 0; i--)
    {
      if (reload_flags[i] == 1)
	{
	  /* Cancel this reload and, recursively, any address reloads
	     that were feeding its input value.  */
	  deallocate_reload_reg (i);
	  remove_address_replacements (rld[i].in);
	  rld[i].in = 0;
	  something_changed = 1;
	}
    }
  return something_changed;
}
1710
1711/* If there is only one output reload, and it is not for an earlyclobber
1712 operand, try to combine it with a (logically unrelated) input reload
1713 to reduce the number of reload registers needed.
1714
1715 This is safe if the input reload does not appear in
1716 the value being output-reloaded, because this implies
1717 it is not needed any more once the original insn completes.
1718
1719 If that doesn't work, see we can use any of the registers that
1720 die in this insn as a reload register. We can if it is of the right
1721 class and does not appear in the value being output-reloaded. */
1722
static void
combine_reloads (void)
{
  int i, regno;
  /* Index of the single output reload, or -1 while none found.  */
  int output_reload = -1;
  /* Index of a secondary (then tertiary) output reload being checked.  */
  int secondary_out = -1;
  rtx note;

  /* Find the output reload; return unless there is exactly one
     and that one is mandatory.  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].out != 0)
      {
	if (output_reload >= 0)
	  return;
	output_reload = i;
      }

  if (output_reload < 0 || rld[output_reload].optional)
    return;

  /* An input-output reload isn't combinable.  */

  if (rld[output_reload].in != 0)
    return;

  /* If this reload is for an earlyclobber operand, we can't do anything.  */
  if (earlyclobber_operand_p (rld[output_reload].out))
    return;

  /* If there is a reload for part of the address of this operand, we would
     need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
     its life to the point where doing this combine would not lower the
     number of spill registers needed.  */
  for (i = 0; i < n_reloads; i++)
    if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	&& rld[i].opnum == rld[output_reload].opnum)
      return;

  /* Check each input reload; can we combine it?  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
	/* Life span of this reload must not extend past main insn.  */
	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
	&& rld[i].when_needed != RELOAD_OTHER
	/* Both reloads must occupy the same number of registers in
	   their respective classes, or sharing a register is pointless.  */
	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
				       [(int) rld[output_reload].outmode])
	&& rld[i].inc == 0
	&& rld[i].reg_rtx == 0
	/* Don't combine two reloads with different secondary
	   memory locations.  */
	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
	/* With small register classes the classes must be identical;
	   otherwise one class being a subset of the other suffices.  */
	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
	    ? (rld[i].rclass == rld[output_reload].rclass)
	    : (reg_class_subset_p (rld[i].rclass,
				   rld[output_reload].rclass)
	       || reg_class_subset_p (rld[output_reload].rclass,
				      rld[i].rclass)))
	&& (MATCHES (rld[i].in, rld[output_reload].out)
	    /* Args reversed because the first arg seems to be
	       the one that we imagine being modified
	       while the second is the one that might be affected.  */
	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
						      rld[i].in)
		/* However, if the input is a register that appears inside
		   the output, then we also can't share.
		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
		   If the same reload reg is used for both reg 69 and the
		   result to be stored in memory, then that result
		   will clobber the address of the memory ref.  */
		&& ! (REG_P (rld[i].in)
		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
							     rld[output_reload].out))))
	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
					 rld[i].when_needed != RELOAD_FOR_INPUT)
	&& (reg_class_size[(int) rld[i].rclass]
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	/* We will allow making things slightly worse by combining an
	   input and an output, but no worse than that.  */
	&& (rld[i].when_needed == RELOAD_FOR_INPUT
	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
      {
	int j;

	/* We have found a reload to combine with!  */
	rld[i].out = rld[output_reload].out;
	rld[i].out_reg = rld[output_reload].out_reg;
	rld[i].outmode = rld[output_reload].outmode;
	/* Mark the old output reload as inoperative.  */
	rld[output_reload].out = 0;
	/* The combined reload is needed for the entire insn.  */
	rld[i].when_needed = RELOAD_OTHER;
	/* If the output reload had a secondary reload, copy it.  */
	if (rld[output_reload].secondary_out_reload != -1)
	  {
	    rld[i].secondary_out_reload
	      = rld[output_reload].secondary_out_reload;
	    rld[i].secondary_out_icode
	      = rld[output_reload].secondary_out_icode;
	  }

	/* Copy any secondary MEM.  */
	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
	/* If required, minimize the register class.  */
	if (reg_class_subset_p (rld[output_reload].rclass,
				rld[i].rclass))
	  rld[i].rclass = rld[output_reload].rclass;

	/* Transfer all replacements from the old reload to the combined.  */
	for (j = 0; j < n_replacements; j++)
	  if (replacements[j].what == output_reload)
	    replacements[j].what = i;

	return;
      }

  /* If this insn has only one operand that is modified or written (assumed
     to be the first), it must be the one corresponding to this reload.  It
     is safe to use anything that dies in this insn for that output provided
     that it does not occur in the output (we already know it isn't an
     earlyclobber).  If this is an asm insn, give up.  */

  if (INSN_CODE (this_insn) == -1)
    return;

  /* Reject the insn if any operand after the first is also written;
     the assumption above would then not hold.  */
  for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
    if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
      return;

  /* See if some hard register that dies in this insn and is not used in
     the output is the right class.  Only works if the register we pick
     up can fully hold our output reload.  */
  for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_DEAD
	&& REG_P (XEXP (note, 0))
	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
						rld[output_reload].out)
	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	&& targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
			      regno)
	&& (hard_regno_nregs (regno, rld[output_reload].outmode)
	    <= REG_NREGS (XEXP (note, 0)))
	/* Ensure that a secondary or tertiary reload for this output
	   won't want this register.  */
	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
	    || (!(TEST_HARD_REG_BIT
		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
		    || !(TEST_HARD_REG_BIT
			 (reg_class_contents[(int) rld[secondary_out].rclass],
			  regno)))))
	&& !fixed_regs[regno]
	/* Check that a former pseudo is valid; see find_dummy_reload.  */
	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       ORIGINAL_REGNO (XEXP (note, 0)))
		&& REG_NREGS (XEXP (note, 0)) == 1)))
      {
	rld[output_reload].reg_rtx
	  = gen_rtx_REG (rld[output_reload].outmode, regno);
	return;
      }
}
1898
1899/* Try to find a reload register for an in-out reload (expressions IN and OUT).
1900 See if one of IN and OUT is a register that may be used;
1901 this is desirable since a spill-register won't be needed.
1902 If so, return the register rtx that proves acceptable.
1903
1904 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1905 RCLASS is the register class required for the reload.
1906
1907 If FOR_REAL is >= 0, it is the number of the reload,
1908 and in some cases when it can be discovered that OUT doesn't need
1909 to be computed, clear out rld[FOR_REAL].out.
1910
1911 If FOR_REAL is -1, this should not be done, because this call
1912 is just to see if a register can be found, not to find and install it.
1913
1914 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1915 puts an additional constraint on being able to use IN for OUT since
1916 IN must not appear elsewhere in the insn (it is assumed that IN itself
1917 is safe from the earlyclobber). */
1918
static rtx
find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
		   machine_mode inmode, machine_mode outmode,
		   reg_class_t rclass, int for_real, int earlyclobber)
{
  rtx in = real_in;
  rtx out = real_out;
  /* Hard-register offsets accumulated while stripping SUBREGs below.  */
  int in_offset = 0;
  int out_offset = 0;
  /* The register rtx found acceptable, or 0 if none.  */
  rtx value = 0;

  /* If operands exceed a word, we can't use either of them
     unless they have the same size.  */
  if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
      && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
	  || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
    return 0;

  /* Note that {in,out}_offset are needed only when 'in' or 'out'
     respectively refers to a hard register.  */

  /* Find the inside of any subregs.  */
  while (GET_CODE (out) == SUBREG)
    {
      if (REG_P (SUBREG_REG (out))
	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
					   GET_MODE (SUBREG_REG (out)),
					   SUBREG_BYTE (out),
					   GET_MODE (out));
      out = SUBREG_REG (out);
    }
  while (GET_CODE (in) == SUBREG)
    {
      if (REG_P (SUBREG_REG (in))
	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
					  GET_MODE (SUBREG_REG (in)),
					  SUBREG_BYTE (in),
					  GET_MODE (in));
      in = SUBREG_REG (in);
    }

  /* Narrow down the reg class, the same way push_reload will;
     otherwise we might find a dummy now, but push_reload won't.  */
  {
    reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
    if (preferred_class != NO_REGS)
      rclass = (enum reg_class) preferred_class;
  }

  /* See if OUT will do.  */
  if (REG_P (out)
      && REGNO (out) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (out) + out_offset;
      unsigned int nwords = hard_regno_nregs (regno, outmode);
      rtx saved_rtx;

      /* When we consider whether the insn uses OUT,
	 ignore references within IN.  They don't prevent us
	 from copying IN into OUT, because those refs would
	 move into the insn that reloads IN.

	 However, we only ignore IN in its role as this reload.
	 If the insn uses IN elsewhere and it contains OUT,
	 that counts.  We can't be sure it's the "same" operand
	 so it might not go through this reload.

	 We also need to avoid using OUT if it, or part of it, is a
	 fixed register.  Modifying such registers, even transiently,
	 may have undefined effects on the machine, such as modifying
	 the stack pointer.  */
      saved_rtx = *inloc;
      *inloc = const0_rtx;

      if (regno < FIRST_PSEUDO_REGISTER
	  && targetm.hard_regno_mode_ok (regno, outmode)
	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
					     PATTERN (this_insn), outloc))
	{
	  unsigned int i;

	  /* Every constituent hard reg must be in RCLASS and not fixed.  */
	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i)
		|| fixed_regs[regno + i])
	      break;

	  if (i == nwords)
	    {
	      if (REG_P (real_out))
		value = real_out;
	      else
		value = gen_rtx_REG (outmode, regno);
	    }
	}

      /* Restore the operand location we temporarily blanked out above.  */
      *inloc = saved_rtx;
    }

  /* Consider using IN if OUT was not acceptable
     or if OUT dies in this insn (like the quotient in a divmod insn).
     We can't use IN unless it dies in this insn,
     which means we must know accurately which hard regs are live.
     Also, the result can't go in IN if IN is used within OUT,
     or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
  if (hard_regs_live_known
      && REG_P (in)
      && REGNO (in) < FIRST_PSEUDO_REGISTER
      && (value == 0
	  || find_reg_note (this_insn, REG_UNUSED, real_out))
      && find_reg_note (this_insn, REG_DEAD, real_in)
      && !fixed_regs[REGNO (in)]
      && targetm.hard_regno_mode_ok (REGNO (in),
				     /* The only case where out and real_out
					might have different modes is where
					real_out is a subreg, and in that
					case, out has a real mode.  */
				     (GET_MODE (out) != VOIDmode
				      ? GET_MODE (out) : outmode))
      && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
	  /* However only do this if we can be sure that this input
	     operand doesn't correspond with an uninitialized pseudo.
	     global can assign some hardreg to it that is the same as
	     the one assigned to a different, also live pseudo (as it
	     can ignore the conflict).  We must never introduce writes
	     to such hardregs, as they would clobber the other live
	     pseudo.  See PR 20973.  */
	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			     ORIGINAL_REGNO (in))
	      /* Similarly, only do this if we can be sure that the death
		 note is still valid.  global can assign some hardreg to
		 the pseudo referenced in the note and simultaneously a
		 subword of this hardreg to a different, also live pseudo,
		 because only another subword of the hardreg is actually
		 used in the insn.  This cannot happen if the pseudo has
		 been assigned exactly one hardreg.  See PR 33732.  */
	      && REG_NREGS (in) == 1)))
    {
      unsigned int regno = REGNO (in) + in_offset;
      unsigned int nwords = hard_regno_nregs (regno, inmode);

      if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
	  && ! hard_reg_set_here_p (regno, regno + nwords,
				    PATTERN (this_insn))
	  && (! earlyclobber
	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
						 PATTERN (this_insn), inloc)))
	{
	  unsigned int i;

	  /* Every constituent hard reg must be in RCLASS.  */
	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i))
	      break;

	  if (i == nwords)
	    {
	      /* If we were going to use OUT as the reload reg
		 and changed our mind, it means OUT is a dummy that
		 dies here.  So don't bother copying value to it.  */
	      if (for_real >= 0 && value == real_out)
		rld[for_real].out = 0;
	      if (REG_P (real_in))
		value = real_in;
	      else
		value = gen_rtx_REG (inmode, regno);
	    }
	}
    }

  return value;
}
2093
2094/* This page contains subroutines used mainly for determining
2095 whether the IN or an OUT of a reload can serve as the
2096 reload register. */
2097
2098/* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2099
2100int
2101earlyclobber_operand_p (rtx x)
2102{
2103 int i;
2104
2105 for (i = 0; i < n_earlyclobbers; i++)
2106 if (reload_earlyclobbers[i] == x)
2107 return 1;
2108
2109 return 0;
2110}
2111
2112/* Return 1 if expression X alters a hard reg in the range
2113 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2114 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2115 X should be the body of an instruction. */
2116
2117static int
2118hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2119{
2120 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2121 {
2122 rtx op0 = SET_DEST (x);
2123
2124 while (GET_CODE (op0) == SUBREG)
2125 op0 = SUBREG_REG (op0);
2126 if (REG_P (op0))
2127 {
2128 unsigned int r = REGNO (op0);
2129
2130 /* See if this reg overlaps range under consideration. */
2131 if (r < end_regno
2132 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2133 return 1;
2134 }
2135 }
2136 else if (GET_CODE (x) == PARALLEL)
2137 {
2138 int i = XVECLEN (x, 0) - 1;
2139
2140 for (; i >= 0; i--)
2141 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2142 return 1;
2143 }
2144
2145 return 0;
2146}
2147
2148/* Return 1 if ADDR is a valid memory address for mode MODE
2149 in address space AS, and check that each pseudo reg has the
2150 proper kind of hard reg. */
2151
int
strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
				    rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Targets still defining the old GO_IF_LEGITIMATE_ADDRESS macro
     only handle the generic address space.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  /* The macro jumps to WIN when ADDR is a legitimate address.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* The third argument (1) selects the strict form of the hook, which
     requires pseudos to have been assigned suitable hard regs.  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
#endif
}
2167
2168/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2169 if they are the same hard reg, and has special hacks for
2170 autoincrement and autodecrement.
2171 This is specifically intended for find_reloads to use
2172 in determining whether two operands match.
2173 X is the operand whose number is the lower of the two.
2174
2175 The value is 2 if Y contains a pre-increment that matches
2176 a non-incrementing address in X. */
2177
2178/* ??? To be completely correct, we should arrange to pass
2179 for X the output operand and for Y the input operand.
2180 For now, we assume that the output operand has the lower number
2181 because that is natural in (SET output (... input ...)). */
2182
int
operands_match_p (rtx x, rtx y)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  /* Set when any subexpression matched with value 2 (pre-increment
     matching a non-incrementing address); propagated into our result.  */
  int success_2;

  if (x == y)
    return 1;
  /* Fast path: both operands are (possibly SUBREGs of) registers.
     Compare the effective register numbers.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int j;

      if (code == SUBREG)
	{
	  i = REGNO (SUBREG_REG (x));
	  /* Pseudo inside a SUBREG: fall through to structural compare.  */
	  if (i >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x),
				    GET_MODE (x));
	}
      else
	i = REGNO (x);

      if (GET_CODE (y) == SUBREG)
	{
	  j = REGNO (SUBREG_REG (y));
	  if (j >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
				    GET_MODE (SUBREG_REG (y)),
				    SUBREG_BYTE (y),
				    GET_MODE (y));
	}
      else
	j = REGNO (y);

      /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
	 multiple hard register group of scalar integer registers, so that
	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
	 register.  */
      scalar_int_mode xmode;
      if (REG_WORDS_BIG_ENDIAN
	  && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
	  && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
	  && i < FIRST_PSEUDO_REGISTER)
	i += hard_regno_nregs (i, xmode) - 1;
      scalar_int_mode ymode;
      if (REG_WORDS_BIG_ENDIAN
	  && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
	  && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  && j < FIRST_PSEUDO_REGISTER)
	j += hard_regno_nregs (j, ymode) - 1;

      return i == j;
    }
  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two postincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a postincrement matches ordinary indexing
     if the postincrement is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y);
  /* Two preincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a preincrement matches ordinary indexing
     if the preincrement is the input operand.
     In this case, return 2, since some callers need to do special
     things when this happens.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;

 slow:

  /* Now we have disposed of all the cases in which different rtx codes
     can match.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants were already handled by the x == y test above.  */
      return 0;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  success_2 = 0;
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
	  if (val == 0)
	    return 0;
	  /* If any subexpression returns 2,
	     we should return 2 if we are successful.  */
	  if (val == 2)
	    success_2 = 1;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
	      if (val == 0)
		return 0;
	      if (val == 2)
		success_2 = 1;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1 + success_2;
}
2345
2346/* Describe the range of registers or memory referenced by X.
2347 If X is a register, set REG_FLAG and put the first register
2348 number into START and the last plus one into END.
2349 If X is a memory reference, put a base address into BASE
2350 and a range of integer offsets into START and END.
2351 If X is pushing on the stack, we can assume it causes no trouble,
2352 so we set the SAFE field. */
2353
static struct decomposition
decompose (rtx x)
{
  struct decomposition val;
  /* Nonzero if the address was wrapped in a CONST.  */
  int all_const = 0;

  memset (&val, 0, sizeof (val));

  switch (GET_CODE (x))
    {
    case MEM:
      {
	rtx base = NULL_RTX, offset = 0;
	rtx addr = XEXP (x, 0);

	/* Simple auto-inc/dec: the access covers one mode-size span on
	   either side of the base register.  */
	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
	  {
	    val.base = XEXP (addr, 0);
	    val.start = -GET_MODE_SIZE (GET_MODE (x));
	    val.end = GET_MODE_SIZE (GET_MODE (x));
	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
	    return val;
	  }

	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
	  {
	    /* Only handle (PRE/POST_MODIFY reg (PLUS reg const)).  */
	    if (GET_CODE (XEXP (addr, 1)) == PLUS
		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
	      {
		val.base = XEXP (addr, 0);
		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
		val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
		val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
		return val;
	      }
	  }

	if (GET_CODE (addr) == CONST)
	  {
	    addr = XEXP (addr, 0);
	    all_const = 1;
	  }
	/* Split a top-level PLUS into a base and a constant offset,
	   whichever side the constant is on.  */
	if (GET_CODE (addr) == PLUS)
	  {
	    if (CONSTANT_P (XEXP (addr, 0)))
	      {
		base = XEXP (addr, 1);
		offset = XEXP (addr, 0);
	      }
	    else if (CONSTANT_P (XEXP (addr, 1)))
	      {
		base = XEXP (addr, 0);
		offset = XEXP (addr, 1);
	      }
	  }

	/* No constant part found: the whole address is the base.  */
	if (offset == 0)
	  {
	    base = addr;
	    offset = const0_rtx;
	  }
	if (GET_CODE (offset) == CONST)
	  offset = XEXP (offset, 0);
	/* Fold anything non-CONST_INT left in OFFSET back into BASE so
	   that OFFSET ends up a bare integer.  */
	if (GET_CODE (offset) == PLUS)
	  {
	    if (CONST_INT_P (XEXP (offset, 0)))
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
		offset = XEXP (offset, 0);
	      }
	    else if (CONST_INT_P (XEXP (offset, 1)))
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
		offset = XEXP (offset, 1);
	      }
	    else
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
		offset = const0_rtx;
	      }
	  }
	else if (!CONST_INT_P (offset))
	  {
	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
	    offset = const0_rtx;
	  }

	/* Re-wrap the base in a CONST if the original address had one.  */
	if (all_const && GET_CODE (base) == PLUS)
	  base = gen_rtx_CONST (GET_MODE (base), base);

	gcc_assert (CONST_INT_P (offset));

	val.start = INTVAL (offset);
	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
	val.base = base;
      }
      break;

    case REG:
      val.reg_flag = 1;
      val.start = true_regnum (x);
      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
	{
	  /* A pseudo with no hard reg.  */
	  val.start = REGNO (x);
	  val.end = val.start + 1;
	}
      else
	/* A hard reg.  */
	val.end = end_hard_regno (GET_MODE (x), val.start);
      break;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	/* This could be more precise, but it's good enough.  */
	return decompose (SUBREG_REG (x));
      val.reg_flag = 1;
      val.start = true_regnum (x);
      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
	return decompose (SUBREG_REG (x));
      else
	/* A hard reg.  */
	val.end = val.start + subreg_nregs (x);
      break;

    case SCRATCH:
      /* This hasn't been assigned yet, so it can't conflict yet.  */
      val.safe = 1;
      break;

    default:
      gcc_assert (CONSTANT_P (x));
      val.safe = 1;
      break;
    }
  return val;
}
2493
2494/* Return 1 if altering Y will not modify the value of X.
2495 Y is also described by YDATA, which should be decompose (Y). */
2496
2497static int
2498immune_p (rtx x, rtx y, struct decomposition ydata)
2499{
2500 struct decomposition xdata;
2501
2502 if (ydata.reg_flag)
2503 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2504 if (ydata.safe)
2505 return 1;
2506
2507 gcc_assert (MEM_P (y));
2508 /* If Y is memory and X is not, Y can't affect X. */
2509 if (!MEM_P (x))
2510 return 1;
2511
2512 xdata = decompose (x);
2513
2514 if (! rtx_equal_p (xdata.base, ydata.base))
2515 {
2516 /* If bases are distinct symbolic constants, there is no overlap. */
2517 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2518 return 1;
2519 /* Constants and stack slots never overlap. */
2520 if (CONSTANT_P (xdata.base)
2521 && (ydata.base == frame_pointer_rtx
2522 || ydata.base == hard_frame_pointer_rtx
2523 || ydata.base == stack_pointer_rtx))
2524 return 1;
2525 if (CONSTANT_P (ydata.base)
2526 && (xdata.base == frame_pointer_rtx
2527 || xdata.base == hard_frame_pointer_rtx
2528 || xdata.base == stack_pointer_rtx))
2529 return 1;
2530 /* If either base is variable, we don't know anything. */
2531 return 0;
2532 }
2533
2534 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2535}
2536
2537/* Similar, but calls decompose. */
2538
2539int
2540safe_from_earlyclobber (rtx op, rtx clobber)
2541{
2542 struct decomposition early_data;
2543
2544 early_data = decompose (clobber);
2545 return immune_p (op, clobber, early_data);
2546}
2547
2548/* Main entry point of this file: search the body of INSN
2549 for values that need reloading and record them with push_reload.
2550 REPLACE nonzero means record also where the values occur
2551 so that subst_reloads can be used.
2552
2553 IND_LEVELS says how many levels of indirection are supported by this
2554 machine; a value of zero means that a memory reference is not a valid
2555 memory address.
2556
2557 LIVE_KNOWN says we have valid information about which hard
2558 regs are live at each point in the program; this is true when
2559 we are called from global_alloc but false when stupid register
2560 allocation has been done.
2561
2562 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2563 which is nonnegative if the reg has been commandeered for reloading into.
2564 It is copied into STATIC_RELOAD_REG_P and referenced from there
2565 by various subroutines.
2566
2567 Return TRUE if some operands need to be changed, because of swapping
2568 commutative operands, reg_equiv_address substitution, or whatever. */
2569
2570int
2571find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2572 short *reload_reg_p)
2573{
2574 int insn_code_number;
2575 int i, j;
2576 int noperands;
2577 /* These start out as the constraints for the insn
2578 and they are chewed up as we consider alternatives. */
2579 const char *constraints[MAX_RECOG_OPERANDS];
2580 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2581 a register. */
2582 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2583 char pref_or_nothing[MAX_RECOG_OPERANDS];
2584 /* Nonzero for a MEM operand whose entire address needs a reload.
2585 May be -1 to indicate the entire address may or may not need a reload. */
2586 int address_reloaded[MAX_RECOG_OPERANDS];
2587 /* Nonzero for an address operand that needs to be completely reloaded.
2588 May be -1 to indicate the entire operand may or may not need a reload. */
2589 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2590 /* Value of enum reload_type to use for operand. */
2591 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2592 /* Value of enum reload_type to use within address of operand. */
2593 enum reload_type address_type[MAX_RECOG_OPERANDS];
2594 /* Save the usage of each operand. */
2595 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2596 int no_input_reloads = 0, no_output_reloads = 0;
2597 int n_alternatives;
2598 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2599 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2600 char this_alternative_win[MAX_RECOG_OPERANDS];
2601 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2602 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2603 int this_alternative_matches[MAX_RECOG_OPERANDS];
2604 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2605 int this_alternative_number;
2606 int goal_alternative_number = 0;
2607 int operand_reloadnum[MAX_RECOG_OPERANDS];
2608 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2609 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2610 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2611 char goal_alternative_win[MAX_RECOG_OPERANDS];
2612 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2613 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2614 int goal_alternative_swapped;
2615 int best;
2616 int commutative;
2617 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2618 rtx substed_operand[MAX_RECOG_OPERANDS];
2619 rtx body = PATTERN (insn);
2620 rtx set = single_set (insn);
2621 int goal_earlyclobber = 0, this_earlyclobber;
2622 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2623 int retval = 0;
2624
2625 this_insn = insn;
2626 n_reloads = 0;
2627 n_replacements = 0;
2628 n_earlyclobbers = 0;
2629 replace_reloads = replace;
2630 hard_regs_live_known = live_known;
2631 static_reload_reg_p = reload_reg_p;
2632
2633 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2634 neither are insns that SET cc0. Insns that use CC0 are not allowed
2635 to have any input reloads. */
2636 if (JUMP_P (insn) || CALL_P (insn))
2637 no_output_reloads = 1;
2638
2639 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2640 no_input_reloads = 1;
2641 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2642 no_output_reloads = 1;
2643
2644 /* The eliminated forms of any secondary memory locations are per-insn, so
2645 clear them out here. */
2646
2647 if (secondary_memlocs_elim_used)
2648 {
2649 memset (secondary_memlocs_elim, 0,
2650 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2651 secondary_memlocs_elim_used = 0;
2652 }
2653
2654 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2655 is cheap to move between them. If it is not, there may not be an insn
2656 to do the copy, so we may need a reload. */
2657 if (GET_CODE (body) == SET
2658 && REG_P (SET_DEST (body))
2659 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2660 && REG_P (SET_SRC (body))
2661 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2662 && register_move_cost (GET_MODE (SET_SRC (body)),
2663 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2664 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2665 return 0;
2666
2667 extract_insn (insn);
2668
2669 noperands = reload_n_operands = recog_data.n_operands;
2670 n_alternatives = recog_data.n_alternatives;
2671
2672 /* Just return "no reloads" if insn has no operands with constraints. */
2673 if (noperands == 0 || n_alternatives == 0)
2674 return 0;
2675
2676 insn_code_number = INSN_CODE (insn);
2677 this_insn_is_asm = insn_code_number < 0;
2678
2679 memcpy (operand_mode, recog_data.operand_mode,
2680 noperands * sizeof (machine_mode));
2681 memcpy (constraints, recog_data.constraints,
2682 noperands * sizeof (const char *));
2683
2684 commutative = -1;
2685
2686 /* If we will need to know, later, whether some pair of operands
2687 are the same, we must compare them now and save the result.
2688 Reloading the base and index registers will clobber them
2689 and afterward they will fail to match. */
2690
2691 for (i = 0; i < noperands; i++)
2692 {
2693 const char *p;
2694 int c;
2695 char *end;
2696
2697 substed_operand[i] = recog_data.operand[i];
2698 p = constraints[i];
2699
2700 modified[i] = RELOAD_READ;
2701
2702 /* Scan this operand's constraint to see if it is an output operand,
2703 an in-out operand, is commutative, or should match another. */
2704
2705 while ((c = *p))
2706 {
2707 p += CONSTRAINT_LEN (c, p);
2708 switch (c)
2709 {
2710 case '=':
2711 modified[i] = RELOAD_WRITE;
2712 break;
2713 case '+':
2714 modified[i] = RELOAD_READ_WRITE;
2715 break;
2716 case '%':
2717 {
2718 /* The last operand should not be marked commutative. */
2719 gcc_assert (i != noperands - 1);
2720
2721 /* We currently only support one commutative pair of
2722 operands. Some existing asm code currently uses more
2723 than one pair. Previously, that would usually work,
2724 but sometimes it would crash the compiler. We
2725 continue supporting that case as well as we can by
2726 silently ignoring all but the first pair. In the
2727 future we may handle it correctly. */
2728 if (commutative < 0)
2729 commutative = i;
2730 else
2731 gcc_assert (this_insn_is_asm);
2732 }
2733 break;
2734 /* Use of ISDIGIT is tempting here, but it may get expensive because
2735 of locale support we don't want. */
2736 case '0': case '1': case '2': case '3': case '4':
2737 case '5': case '6': case '7': case '8': case '9':
2738 {
2739 c = strtoul (p - 1, &end, 10);
2740 p = end;
2741
2742 operands_match[c][i]
2743 = operands_match_p (recog_data.operand[c],
2744 recog_data.operand[i]);
2745
2746 /* An operand may not match itself. */
2747 gcc_assert (c != i);
2748
2749 /* If C can be commuted with C+1, and C might need to match I,
2750 then C+1 might also need to match I. */
2751 if (commutative >= 0)
2752 {
2753 if (c == commutative || c == commutative + 1)
2754 {
2755 int other = c + (c == commutative ? 1 : -1);
2756 operands_match[other][i]
2757 = operands_match_p (recog_data.operand[other],
2758 recog_data.operand[i]);
2759 }
2760 if (i == commutative || i == commutative + 1)
2761 {
2762 int other = i + (i == commutative ? 1 : -1);
2763 operands_match[c][other]
2764 = operands_match_p (recog_data.operand[c],
2765 recog_data.operand[other]);
2766 }
2767 /* Note that C is supposed to be less than I.
2768 No need to consider altering both C and I because in
2769 that case we would alter one into the other. */
2770 }
2771 }
2772 }
2773 }
2774 }
2775
2776 /* Examine each operand that is a memory reference or memory address
2777 and reload parts of the addresses into index registers.
2778 Also here any references to pseudo regs that didn't get hard regs
2779 but are equivalent to constants get replaced in the insn itself
2780 with those constants. Nobody will ever see them again.
2781
2782 Finally, set up the preferred classes of each operand. */
2783
2784 for (i = 0; i < noperands; i++)
2785 {
2786 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2787
2788 address_reloaded[i] = 0;
2789 address_operand_reloaded[i] = 0;
2790 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2791 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2792 : RELOAD_OTHER);
2793 address_type[i]
2794 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2795 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2796 : RELOAD_OTHER);
2797
2798 if (*constraints[i] == 0)
2799 /* Ignore things like match_operator operands. */
2800 ;
2801 else if (insn_extra_address_constraint
2802 (lookup_constraint (constraints[i])))
2803 {
2804 address_operand_reloaded[i]
2805 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2806 recog_data.operand[i],
2807 recog_data.operand_loc[i],
2808 i, operand_type[i], ind_levels, insn);
2809
2810 /* If we now have a simple operand where we used to have a
2811 PLUS or MULT, re-recognize and try again. */
2812 if ((OBJECT_P (*recog_data.operand_loc[i])
2813 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2814 && (GET_CODE (recog_data.operand[i]) == MULT
2815 || GET_CODE (recog_data.operand[i]) == PLUS))
2816 {
2817 INSN_CODE (insn) = -1;
2818 retval = find_reloads (insn, replace, ind_levels, live_known,
2819 reload_reg_p);
2820 return retval;
2821 }
2822
2823 recog_data.operand[i] = *recog_data.operand_loc[i];
2824 substed_operand[i] = recog_data.operand[i];
2825
2826 /* Address operands are reloaded in their existing mode,
2827 no matter what is specified in the machine description. */
2828 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2829
2830 /* If the address is a single CONST_INT pick address mode
2831 instead otherwise we will later not know in which mode
2832 the reload should be performed. */
2833 if (operand_mode[i] == VOIDmode)
2834 operand_mode[i] = Pmode;
2835
2836 }
2837 else if (code == MEM)
2838 {
2839 address_reloaded[i]
2840 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2841 recog_data.operand_loc[i],
2842 XEXP (recog_data.operand[i], 0),
2843 &XEXP (recog_data.operand[i], 0),
2844 i, address_type[i], ind_levels, insn);
2845 recog_data.operand[i] = *recog_data.operand_loc[i];
2846 substed_operand[i] = recog_data.operand[i];
2847 }
2848 else if (code == SUBREG)
2849 {
2850 rtx reg = SUBREG_REG (recog_data.operand[i]);
2851 rtx op
2852 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2853 ind_levels,
2854 set != 0
2855 && &SET_DEST (set) == recog_data.operand_loc[i],
2856 insn,
2857 &address_reloaded[i]);
2858
2859 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2860 that didn't get a hard register, emit a USE with a REG_EQUAL
2861 note in front so that we might inherit a previous, possibly
2862 wider reload. */
2863
2864 if (replace
2865 && MEM_P (op)
2866 && REG_P (reg)
2867 && (GET_MODE_SIZE (GET_MODE (reg))
2868 >= GET_MODE_SIZE (GET_MODE (op)))
2869 && reg_equiv_constant (REGNO (reg)) == 0)
2870 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2871 insn),
2872 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2873
2874 substed_operand[i] = recog_data.operand[i] = op;
2875 }
2876 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2877 /* We can get a PLUS as an "operand" as a result of register
2878 elimination. See eliminate_regs and gen_reload. We handle
2879 a unary operator by reloading the operand. */
2880 substed_operand[i] = recog_data.operand[i]
2881 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2882 ind_levels, 0, insn,
2883 &address_reloaded[i]);
2884 else if (code == REG)
2885 {
2886 /* This is equivalent to calling find_reloads_toplev.
2887 The code is duplicated for speed.
2888 When we find a pseudo always equivalent to a constant,
2889 we replace it by the constant. We must be sure, however,
2890 that we don't try to replace it in the insn in which it
2891 is being set. */
2892 int regno = REGNO (recog_data.operand[i]);
2893 if (reg_equiv_constant (regno) != 0
2894 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2895 {
2896 /* Record the existing mode so that the check if constants are
2897 allowed will work when operand_mode isn't specified. */
2898
2899 if (operand_mode[i] == VOIDmode)
2900 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2901
2902 substed_operand[i] = recog_data.operand[i]
2903 = reg_equiv_constant (regno);
2904 }
2905 if (reg_equiv_memory_loc (regno) != 0
2906 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2907 /* We need not give a valid is_set_dest argument since the case
2908 of a constant equivalence was checked above. */
2909 substed_operand[i] = recog_data.operand[i]
2910 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2911 ind_levels, 0, insn,
2912 &address_reloaded[i]);
2913 }
2914 /* If the operand is still a register (we didn't replace it with an
2915 equivalent), get the preferred class to reload it into. */
2916 code = GET_CODE (recog_data.operand[i]);
2917 preferred_class[i]
2918 = ((code == REG && REGNO (recog_data.operand[i])
2919 >= FIRST_PSEUDO_REGISTER)
2920 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2921 : NO_REGS);
2922 pref_or_nothing[i]
2923 = (code == REG
2924 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2925 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2926 }
2927
2928 /* If this is simply a copy from operand 1 to operand 0, merge the
2929 preferred classes for the operands. */
2930 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2931 && recog_data.operand[1] == SET_SRC (set))
2932 {
2933 preferred_class[0] = preferred_class[1]
2934 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2935 pref_or_nothing[0] |= pref_or_nothing[1];
2936 pref_or_nothing[1] |= pref_or_nothing[0];
2937 }
2938
2939 /* Now see what we need for pseudo-regs that didn't get hard regs
2940 or got the wrong kind of hard reg. For this, we must consider
2941 all the operands together against the register constraints. */
2942
2943 best = MAX_RECOG_OPERANDS * 2 + 600;
2944
2945 goal_alternative_swapped = 0;
2946
2947 /* The constraints are made of several alternatives.
2948 Each operand's constraint looks like foo,bar,... with commas
2949 separating the alternatives. The first alternatives for all
2950 operands go together, the second alternatives go together, etc.
2951
2952 First loop over alternatives. */
2953
2954 alternative_mask enabled = get_enabled_alternatives (insn);
2955 for (this_alternative_number = 0;
2956 this_alternative_number < n_alternatives;
2957 this_alternative_number++)
2958 {
2959 int swapped;
2960
2961 if (!TEST_BIT (enabled, this_alternative_number))
2962 {
2963 int i;
2964
2965 for (i = 0; i < recog_data.n_operands; i++)
2966 constraints[i] = skip_alternative (constraints[i]);
2967
2968 continue;
2969 }
2970
2971 /* If insn is commutative (it's safe to exchange a certain pair
2972 of operands) then we need to try each alternative twice, the
2973 second time matching those two operands as if we had
2974 exchanged them. To do this, really exchange them in
2975 operands. */
2976 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
2977 {
2978 /* Loop over operands for one constraint alternative. */
2979 /* LOSERS counts those that don't fit this alternative
2980 and would require loading. */
2981 int losers = 0;
2982		  /* BAD is set to 1 if some operand can't fit this alternative
2983 even after reloading. */
2984 int bad = 0;
2985 /* REJECT is a count of how undesirable this alternative says it is
2986 if any reloading is required. If the alternative matches exactly
2987 then REJECT is ignored, but otherwise it gets this much
2988 counted against it in addition to the reloading needed. Each
2989 ? counts three times here since we want the disparaging caused by
2990 a bad register class to only count 1/3 as much. */
2991 int reject = 0;
2992
2993 if (swapped)
2994 {
2995 recog_data.operand[commutative] = substed_operand[commutative + 1];
2996 recog_data.operand[commutative + 1] = substed_operand[commutative];
2997 /* Swap the duplicates too. */
2998 for (i = 0; i < recog_data.n_dups; i++)
2999 if (recog_data.dup_num[i] == commutative
3000 || recog_data.dup_num[i] == commutative + 1)
3001 *recog_data.dup_loc[i]
3002 = recog_data.operand[(int) recog_data.dup_num[i]];
3003
3004 std::swap (preferred_class[commutative],
3005 preferred_class[commutative + 1]);
3006 std::swap (pref_or_nothing[commutative],
3007 pref_or_nothing[commutative + 1]);
3008 std::swap (address_reloaded[commutative],
3009 address_reloaded[commutative + 1]);
3010 }
3011
3012 this_earlyclobber = 0;
3013
3014 for (i = 0; i < noperands; i++)
3015 {
3016 const char *p = constraints[i];
3017 char *end;
3018 int len;
3019 int win = 0;
3020 int did_match = 0;
3021 /* 0 => this operand can be reloaded somehow for this alternative. */
3022 int badop = 1;
3023 /* 0 => this operand can be reloaded if the alternative allows regs. */
3024 int winreg = 0;
3025 int c;
3026 int m;
3027 rtx operand = recog_data.operand[i];
3028 int offset = 0;
3029 /* Nonzero means this is a MEM that must be reloaded into a reg
3030 regardless of what the constraint says. */
3031 int force_reload = 0;
3032 int offmemok = 0;
3033 /* Nonzero if a constant forced into memory would be OK for this
3034 operand. */
3035 int constmemok = 0;
3036 int earlyclobber = 0;
3037 enum constraint_num cn;
3038 enum reg_class cl;
3039
3040 /* If the predicate accepts a unary operator, it means that
3041 we need to reload the operand, but do not do this for
3042 match_operator and friends. */
3043 if (UNARY_P (operand) && *p != 0)
3044 operand = XEXP (operand, 0);
3045
3046 /* If the operand is a SUBREG, extract
3047 the REG or MEM (or maybe even a constant) within.
3048 (Constants can occur as a result of reg_equiv_constant.) */
3049
3050 while (GET_CODE (operand) == SUBREG)
3051 {
3052 /* Offset only matters when operand is a REG and
3053 it is a hard reg. This is because it is passed
3054 to reg_fits_class_p if it is a REG and all pseudos
3055 return 0 from that function. */
3056 if (REG_P (SUBREG_REG (operand))
3057 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3058 {
3059 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3060 GET_MODE (SUBREG_REG (operand)),
3061 SUBREG_BYTE (operand),
3062 GET_MODE (operand)) < 0)
3063 force_reload = 1;
3064 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3065 GET_MODE (SUBREG_REG (operand)),
3066 SUBREG_BYTE (operand),
3067 GET_MODE (operand));
3068 }
3069 operand = SUBREG_REG (operand);
3070 /* Force reload if this is a constant or PLUS or if there may
3071 be a problem accessing OPERAND in the outer mode. */
3072 scalar_int_mode inner_mode;
3073 if (CONSTANT_P (operand)
3074 || GET_CODE (operand) == PLUS
3075 /* We must force a reload of paradoxical SUBREGs
3076 of a MEM because the alignment of the inner value
3077 may not be enough to do the outer reference. On
3078 big-endian machines, it may also reference outside
3079 the object.
3080
3081 On machines that extend byte operations and we have a
3082 SUBREG where both the inner and outer modes are no wider
3083 than a word and the inner mode is narrower, is integral,
3084 and gets extended when loaded from memory, combine.c has
3085 made assumptions about the behavior of the machine in such
3086 register access. If the data is, in fact, in memory we
3087 must always load using the size assumed to be in the
3088 register and let the insn do the different-sized
3089 accesses.
3090
3091 This is doubly true if WORD_REGISTER_OPERATIONS. In
3092 this case eliminate_regs has left non-paradoxical
3093 subregs for push_reload to see. Make sure it does
3094 by forcing the reload.
3095
3096 ??? When is it right at this stage to have a subreg
3097 of a mem that is _not_ to be handled specially? IMO
3098 those should have been reduced to just a mem. */
3099 || ((MEM_P (operand)
3100 || (REG_P (operand)
3101 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3102 && (WORD_REGISTER_OPERATIONS
3103 || ((GET_MODE_BITSIZE (GET_MODE (operand))
3104 < BIGGEST_ALIGNMENT)
3105 && paradoxical_subreg_p (operand_mode[i],
3106 GET_MODE (operand)))
3107 || BYTES_BIG_ENDIAN
3108 || ((GET_MODE_SIZE (operand_mode[i])
3109 <= UNITS_PER_WORD)
3110 && (is_a <scalar_int_mode>
3111 (GET_MODE (operand), &inner_mode))
3112 && (GET_MODE_SIZE (inner_mode)
3113 <= UNITS_PER_WORD)
3114 && paradoxical_subreg_p (operand_mode[i],
3115 inner_mode)
3116 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3117 )
3118 force_reload = 1;
3119 }
3120
3121 this_alternative[i] = NO_REGS;
3122 this_alternative_win[i] = 0;
3123 this_alternative_match_win[i] = 0;
3124 this_alternative_offmemok[i] = 0;
3125 this_alternative_earlyclobber[i] = 0;
3126 this_alternative_matches[i] = -1;
3127
3128 /* An empty constraint or empty alternative
3129 allows anything which matched the pattern. */
3130 if (*p == 0 || *p == ',')
3131 win = 1, badop = 0;
3132
3133 /* Scan this alternative's specs for this operand;
3134 set WIN if the operand fits any letter in this alternative.
3135 Otherwise, clear BADOP if this operand could
3136 fit some letter after reloads,
3137 or set WINREG if this operand could fit after reloads
3138 provided the constraint allows some registers. */
3139
3140 do
3141 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3142 {
3143 case '\0':
3144 len = 0;
3145 break;
3146 case ',':
3147 c = '\0';
3148 break;
3149
3150 case '?':
3151 reject += 6;
3152 break;
3153
3154 case '!':
3155 reject = 600;
3156 break;
3157
3158 case '#':
3159 /* Ignore rest of this alternative as far as
3160 reloading is concerned. */
3161 do
3162 p++;
3163 while (*p && *p != ',');
3164 len = 0;
3165 break;
3166
3167 case '0': case '1': case '2': case '3': case '4':
3168 case '5': case '6': case '7': case '8': case '9':
3169 m = strtoul (p, &end, 10);
3170 p = end;
3171 len = 0;
3172
3173 this_alternative_matches[i] = m;
3174 /* We are supposed to match a previous operand.
3175 If we do, we win if that one did.
3176 If we do not, count both of the operands as losers.
3177 (This is too conservative, since most of the time
3178 only a single reload insn will be needed to make
3179 the two operands win. As a result, this alternative
3180 may be rejected when it is actually desirable.) */
3181 if ((swapped && (m != commutative || i != commutative + 1))
3182 /* If we are matching as if two operands were swapped,
3183 also pretend that operands_match had been computed
3184 with swapped.
3185 But if I is the second of those and C is the first,
3186 don't exchange them, because operands_match is valid
3187 only on one side of its diagonal. */
3188 ? (operands_match
3189 [(m == commutative || m == commutative + 1)
3190 ? 2 * commutative + 1 - m : m]
3191 [(i == commutative || i == commutative + 1)
3192 ? 2 * commutative + 1 - i : i])
3193 : operands_match[m][i])
3194 {
3195 /* If we are matching a non-offsettable address where an
3196 offsettable address was expected, then we must reject
3197 this combination, because we can't reload it. */
3198 if (this_alternative_offmemok[m]
3199 && MEM_P (recog_data.operand[m])
3200 && this_alternative[m] == NO_REGS
3201 && ! this_alternative_win[m])
3202 bad = 1;
3203
3204 did_match = this_alternative_win[m];
3205 }
3206 else
3207 {
3208 /* Operands don't match. */
3209 rtx value;
3210 int loc1, loc2;
3211 /* Retroactively mark the operand we had to match
3212 as a loser, if it wasn't already. */
3213 if (this_alternative_win[m])
3214 losers++;
3215 this_alternative_win[m] = 0;
3216 if (this_alternative[m] == NO_REGS)
3217 bad = 1;
3218 /* But count the pair only once in the total badness of
3219 this alternative, if the pair can be a dummy reload.
3220 The pointers in operand_loc are not swapped; swap
3221 them by hand if necessary. */
3222 if (swapped && i == commutative)
3223 loc1 = commutative + 1;
3224 else if (swapped && i == commutative + 1)
3225 loc1 = commutative;
3226 else
3227 loc1 = i;
3228 if (swapped && m == commutative)
3229 loc2 = commutative + 1;
3230 else if (swapped && m == commutative + 1)
3231 loc2 = commutative;
3232 else
3233 loc2 = m;
3234 value
3235 = find_dummy_reload (recog_data.operand[i],
3236 recog_data.operand[m],
3237 recog_data.operand_loc[loc1],
3238 recog_data.operand_loc[loc2],
3239 operand_mode[i], operand_mode[m],
3240 this_alternative[m], -1,
3241 this_alternative_earlyclobber[m]);
3242
3243 if (value != 0)
3244 losers--;
3245 }
3246 /* This can be fixed with reloads if the operand
3247 we are supposed to match can be fixed with reloads. */
3248 badop = 0;
3249 this_alternative[i] = this_alternative[m];
3250
3251 /* If we have to reload this operand and some previous
3252 operand also had to match the same thing as this
3253 operand, we don't know how to do that. So reject this
3254 alternative. */
3255 if (! did_match || force_reload)
3256 for (j = 0; j < i; j++)
3257 if (this_alternative_matches[j]
3258 == this_alternative_matches[i])
3259 {
3260 badop = 1;
3261 break;
3262 }
3263 break;
3264
3265 case 'p':
3266 /* All necessary reloads for an address_operand
3267 were handled in find_reloads_address. */
3268 this_alternative[i]
3269 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3270 ADDRESS, SCRATCH);
3271 win = 1;
3272 badop = 0;
3273 break;
3274
3275 case TARGET_MEM_CONSTRAINT:
3276 if (force_reload)
3277 break;
3278 if (MEM_P (operand)
3279 || (REG_P (operand)
3280 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3281 && reg_renumber[REGNO (operand)] < 0))
3282 win = 1;
3283 if (CONST_POOL_OK_P (operand_mode[i], operand))
3284 badop = 0;
3285 constmemok = 1;
3286 break;
3287
3288 case '<':
3289 if (MEM_P (operand)
3290 && ! address_reloaded[i]
3291 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3292 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3293 win = 1;
3294 break;
3295
3296 case '>':
3297 if (MEM_P (operand)
3298 && ! address_reloaded[i]
3299 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3300 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3301 win = 1;
3302 break;
3303
3304 /* Memory operand whose address is not offsettable. */
3305 case 'V':
3306 if (force_reload)
3307 break;
3308 if (MEM_P (operand)
3309 && ! (ind_levels ? offsettable_memref_p (operand)
3310 : offsettable_nonstrict_memref_p (operand))
3311 /* Certain mem addresses will become offsettable
3312 after they themselves are reloaded. This is important;
3313 we don't want our own handling of unoffsettables
3314 to override the handling of reg_equiv_address. */
3315 && !(REG_P (XEXP (operand, 0))
3316 && (ind_levels == 0
3317 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3318 win = 1;
3319 break;
3320
3321 /* Memory operand whose address is offsettable. */
3322 case 'o':
3323 if (force_reload)
3324 break;
3325 if ((MEM_P (operand)
3326 /* If IND_LEVELS, find_reloads_address won't reload a
3327 pseudo that didn't get a hard reg, so we have to
3328 reject that case. */
3329 && ((ind_levels ? offsettable_memref_p (operand)
3330 : offsettable_nonstrict_memref_p (operand))
3331 /* A reloaded address is offsettable because it is now
3332 just a simple register indirect. */
3333 || address_reloaded[i] == 1))
3334 || (REG_P (operand)
3335 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3336 && reg_renumber[REGNO (operand)] < 0
3337 /* If reg_equiv_address is nonzero, we will be
3338 loading it into a register; hence it will be
3339 offsettable, but we cannot say that reg_equiv_mem
3340 is offsettable without checking. */
3341 && ((reg_equiv_mem (REGNO (operand)) != 0
3342 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3343 || (reg_equiv_address (REGNO (operand)) != 0))))
3344 win = 1;
3345 if (CONST_POOL_OK_P (operand_mode[i], operand)
3346 || MEM_P (operand))
3347 badop = 0;
3348 constmemok = 1;
3349 offmemok = 1;
3350 break;
3351
3352 case '&':
3353 /* Output operand that is stored before the need for the
3354 input operands (and their index registers) is over. */
3355 earlyclobber = 1, this_earlyclobber = 1;
3356 break;
3357
3358 case 'X':
3359 force_reload = 0;
3360 win = 1;
3361 break;
3362
3363 case 'g':
3364 if (! force_reload
3365 /* A PLUS is never a valid operand, but reload can make
3366 it from a register when eliminating registers. */
3367 && GET_CODE (operand) != PLUS
3368 /* A SCRATCH is not a valid operand. */
3369 && GET_CODE (operand) != SCRATCH
3370 && (! CONSTANT_P (operand)
3371 || ! flag_pic
3372 || LEGITIMATE_PIC_OPERAND_P (operand))
3373 && (GENERAL_REGS == ALL_REGS
3374 || !REG_P (operand)
3375 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3376 && reg_renumber[REGNO (operand)] < 0)))
3377 win = 1;
3378 cl = GENERAL_REGS;
3379 goto reg;
3380
3381 default:
3382 cn = lookup_constraint (p);
3383 switch (get_constraint_type (cn))
3384 {
3385 case CT_REGISTER:
3386 cl = reg_class_for_constraint (cn);
3387 if (cl != NO_REGS)
3388 goto reg;
3389 break;
3390
3391 case CT_CONST_INT:
3392 if (CONST_INT_P (operand)
3393 && (insn_const_int_ok_for_constraint
3394 (INTVAL (operand), cn)))
3395 win = true;
3396 break;
3397
3398 case CT_MEMORY:
3399 if (force_reload)
3400 break;
3401 if (constraint_satisfied_p (operand, cn))
3402 win = 1;
3403 /* If the address was already reloaded,
3404 we win as well. */
3405 else if (MEM_P (operand) && address_reloaded[i] == 1)
3406 win = 1;
3407 /* Likewise if the address will be reloaded because
3408 reg_equiv_address is nonzero. For reg_equiv_mem
3409 we have to check. */
3410 else if (REG_P (operand)
3411 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3412 && reg_renumber[REGNO (operand)] < 0
3413 && ((reg_equiv_mem (REGNO (operand)) != 0
3414 && (constraint_satisfied_p
3415 (reg_equiv_mem (REGNO (operand)),
3416 cn)))
3417 || (reg_equiv_address (REGNO (operand))
3418 != 0)))
3419 win = 1;
3420
3421 /* If we didn't already win, we can reload
3422 constants via force_const_mem, and other
3423 MEMs by reloading the address like for 'o'. */
3424 if (CONST_POOL_OK_P (operand_mode[i], operand)
3425 || MEM_P (operand))
3426 badop = 0;
3427 constmemok = 1;
3428 offmemok = 1;
3429 break;
3430
3431 case CT_SPECIAL_MEMORY:
3432 if (force_reload)
3433 break;
3434 if (constraint_satisfied_p (operand, cn))
3435 win = 1;
3436 /* Likewise if the address will be reloaded because
3437 reg_equiv_address is nonzero. For reg_equiv_mem
3438 we have to check. */
3439 else if (REG_P (operand)
3440 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3441 && reg_renumber[REGNO (operand)] < 0
3442 && reg_equiv_mem (REGNO (operand)) != 0
3443 && (constraint_satisfied_p
3444 (reg_equiv_mem (REGNO (operand)), cn)))
3445 win = 1;
3446 break;
3447
3448 case CT_ADDRESS:
3449 if (constraint_satisfied_p (operand, cn))
3450 win = 1;
3451
3452 /* If we didn't already win, we can reload
3453 the address into a base register. */
3454 this_alternative[i]
3455 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3456 ADDRESS, SCRATCH);
3457 badop = 0;
3458 break;
3459
3460 case CT_FIXED_FORM:
3461 if (constraint_satisfied_p (operand, cn))
3462 win = 1;
3463 break;
3464 }
3465 break;
3466
3467 reg:
3468 this_alternative[i]
3469 = reg_class_subunion[this_alternative[i]][cl];
3470 if (GET_MODE (operand) == BLKmode)
3471 break;
3472 winreg = 1;
3473 if (REG_P (operand)
3474 && reg_fits_class_p (operand, this_alternative[i],
3475 offset, GET_MODE (recog_data.operand[i])))
3476 win = 1;
3477 break;
3478 }
3479 while ((p += len), c);
3480
3481 if (swapped == (commutative >= 0 ? 1 : 0))
3482 constraints[i] = p;
3483
3484 /* If this operand could be handled with a reg,
3485 and some reg is allowed, then this operand can be handled. */
3486 if (winreg && this_alternative[i] != NO_REGS
3487 && (win || !class_only_fixed_regs[this_alternative[i]]))
3488 badop = 0;
3489
3490 /* Record which operands fit this alternative. */
3491 this_alternative_earlyclobber[i] = earlyclobber;
3492 if (win && ! force_reload)
3493 this_alternative_win[i] = 1;
3494 else if (did_match && ! force_reload)
3495 this_alternative_match_win[i] = 1;
3496 else
3497 {
3498 int const_to_mem = 0;
3499
3500 this_alternative_offmemok[i] = offmemok;
3501 losers++;
3502 if (badop)
3503 bad = 1;
3504 /* Alternative loses if it has no regs for a reg operand. */
3505 if (REG_P (operand)
3506 && this_alternative[i] == NO_REGS
3507 && this_alternative_matches[i] < 0)
3508 bad = 1;
3509
3510 /* If this is a constant that is reloaded into the desired
3511 class by copying it to memory first, count that as another
3512 reload. This is consistent with other code and is
3513 required to avoid choosing another alternative when
3514 the constant is moved into memory by this function on
3515 an early reload pass. Note that the test here is
3516 precisely the same as in the code below that calls
3517 force_const_mem. */
3518 if (CONST_POOL_OK_P (operand_mode[i], operand)
3519 && ((targetm.preferred_reload_class (operand,
3520 this_alternative[i])
3521 == NO_REGS)
3522 || no_input_reloads))
3523 {
3524 const_to_mem = 1;
3525 if (this_alternative[i] != NO_REGS)
3526 losers++;
3527 }
3528
3529 /* Alternative loses if it requires a type of reload not
3530 permitted for this insn. We can always reload SCRATCH
3531 and objects with a REG_UNUSED note. */
3532 if (GET_CODE (operand) != SCRATCH
3533 && modified[i] != RELOAD_READ && no_output_reloads
3534 && ! find_reg_note (insn, REG_UNUSED, operand))
3535 bad = 1;
3536 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3537 && ! const_to_mem)
3538 bad = 1;
3539
3540 /* If we can't reload this value at all, reject this
3541 alternative. Note that we could also lose due to
3542 LIMIT_RELOAD_CLASS, but we don't check that
3543 here. */
3544
3545 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3546 {
3547 if (targetm.preferred_reload_class (operand,
3548 this_alternative[i])
3549 == NO_REGS)
3550 reject = 600;
3551
3552 if (operand_type[i] == RELOAD_FOR_OUTPUT
3553 && (targetm.preferred_output_reload_class (operand,
3554 this_alternative[i])
3555 == NO_REGS))
3556 reject = 600;
3557 }
3558
3559 /* We prefer to reload pseudos over reloading other things,
3560 since such reloads may be able to be eliminated later.
3561 If we are reloading a SCRATCH, we won't be generating any
3562 insns, just using a register, so it is also preferred.
3563 So bump REJECT in other cases. Don't do this in the
3564 case where we are forcing a constant into memory and
3565 it will then win since we don't want to have a different
3566 alternative match then. */
3567 if (! (REG_P (operand)
3568 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3569 && GET_CODE (operand) != SCRATCH
3570 && ! (const_to_mem && constmemok))
3571 reject += 2;
3572
3573 /* Input reloads can be inherited more often than output
3574 reloads can be removed, so penalize output reloads. */
3575 if (operand_type[i] != RELOAD_FOR_INPUT
3576 && GET_CODE (operand) != SCRATCH)
3577 reject++;
3578 }
3579
3580 /* If this operand is a pseudo register that didn't get
3581 a hard reg and this alternative accepts some
3582 register, see if the class that we want is a subset
3583 of the preferred class for this register. If not,
3584 but it intersects that class, use the preferred class
3585 instead. If it does not intersect the preferred
3586 class, show that usage of this alternative should be
3587 discouraged; it will be discouraged more still if the
3588 register is `preferred or nothing'. We do this
3589 because it increases the chance of reusing our spill
3590 register in a later insn and avoiding a pair of
3591 memory stores and loads.
3592
3593 Don't bother with this if this alternative will
3594 accept this operand.
3595
3596 Don't do this for a multiword operand, since it is
3597 only a small win and has the risk of requiring more
3598 spill registers, which could cause a large loss.
3599
3600 Don't do this if the preferred class has only one
3601 register because we might otherwise exhaust the
3602 class. */
3603
3604 if (! win && ! did_match
3605 && this_alternative[i] != NO_REGS
3606 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3607 && reg_class_size [(int) preferred_class[i]] > 0
3608 && ! small_register_class_p (preferred_class[i]))
3609 {
3610 if (! reg_class_subset_p (this_alternative[i],
3611 preferred_class[i]))
3612 {
3613 /* Since we don't have a way of forming the intersection,
3614 we just do something special if the preferred class
3615 is a subset of the class we have; that's the most
3616 common case anyway. */
3617 if (reg_class_subset_p (preferred_class[i],
3618 this_alternative[i]))
3619 this_alternative[i] = preferred_class[i];
3620 else
3621 reject += (2 + 2 * pref_or_nothing[i]);
3622 }
3623 }
3624 }
3625
3626 /* Now see if any output operands that are marked "earlyclobber"
3627 in this alternative conflict with any input operands
3628 or any memory addresses. */
3629
3630 for (i = 0; i < noperands; i++)
3631 if (this_alternative_earlyclobber[i]
3632 && (this_alternative_win[i] || this_alternative_match_win[i]))
3633 {
3634 struct decomposition early_data;
3635
3636 early_data = decompose (recog_data.operand[i]);
3637
3638 gcc_assert (modified[i] != RELOAD_READ);
3639
3640 if (this_alternative[i] == NO_REGS)
3641 {
3642 this_alternative_earlyclobber[i] = 0;
3643 gcc_assert (this_insn_is_asm);
3644 error_for_asm (this_insn,
3645 "%<&%> constraint used with no register class");
3646 }
3647
3648 for (j = 0; j < noperands; j++)
3649 /* Is this an input operand or a memory ref? */
3650 if ((MEM_P (recog_data.operand[j])
3651 || modified[j] != RELOAD_WRITE)
3652 && j != i
3653 /* Ignore things like match_operator operands. */
3654 && !recog_data.is_operator[j]
3655 /* Don't count an input operand that is constrained to match
3656 the early clobber operand. */
3657 && ! (this_alternative_matches[j] == i
3658 && rtx_equal_p (recog_data.operand[i],
3659 recog_data.operand[j]))
3660 /* Is it altered by storing the earlyclobber operand? */
3661 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3662 early_data))
3663 {
3664 /* If the output is in a non-empty few-regs class,
3665 it's costly to reload it, so reload the input instead. */
3666 if (small_register_class_p (this_alternative[i])
3667 && (REG_P (recog_data.operand[j])
3668 || GET_CODE (recog_data.operand[j]) == SUBREG))
3669 {
3670 losers++;
3671 this_alternative_win[j] = 0;
3672 this_alternative_match_win[j] = 0;
3673 }
3674 else
3675 break;
3676 }
3677 /* If an earlyclobber operand conflicts with something,
3678 it must be reloaded, so request this and count the cost. */
3679 if (j != noperands)
3680 {
3681 losers++;
3682 this_alternative_win[i] = 0;
3683 this_alternative_match_win[j] = 0;
3684 for (j = 0; j < noperands; j++)
3685 if (this_alternative_matches[j] == i
3686 && this_alternative_match_win[j])
3687 {
3688 this_alternative_win[j] = 0;
3689 this_alternative_match_win[j] = 0;
3690 losers++;
3691 }
3692 }
3693 }
3694
3695 /* If one alternative accepts all the operands, no reload required,
3696 choose that alternative; don't consider the remaining ones. */
3697 if (losers == 0)
3698 {
3699 /* Unswap these so that they are never swapped at `finish'. */
3700 if (swapped)
3701 {
3702 recog_data.operand[commutative] = substed_operand[commutative];
3703 recog_data.operand[commutative + 1]
3704 = substed_operand[commutative + 1];
3705 }
3706 for (i = 0; i < noperands; i++)
3707 {
3708 goal_alternative_win[i] = this_alternative_win[i];
3709 goal_alternative_match_win[i] = this_alternative_match_win[i];
3710 goal_alternative[i] = this_alternative[i];
3711 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3712 goal_alternative_matches[i] = this_alternative_matches[i];
3713 goal_alternative_earlyclobber[i]
3714 = this_alternative_earlyclobber[i];
3715 }
3716 goal_alternative_number = this_alternative_number;
3717 goal_alternative_swapped = swapped;
3718 goal_earlyclobber = this_earlyclobber;
3719 goto finish;
3720 }
3721
3722 /* REJECT, set by the ! and ? constraint characters and when a register
3723 would be reloaded into a non-preferred class, discourages the use of
3724 this alternative for a reload goal. REJECT is incremented by six
3725 for each ? and two for each non-preferred class. */
3726 losers = losers * 6 + reject;
3727
3728 /* If this alternative can be made to work by reloading,
3729 and it needs less reloading than the others checked so far,
3730 record it as the chosen goal for reloading. */
3731 if (! bad)
3732 {
3733 if (best > losers)
3734 {
3735 for (i = 0; i < noperands; i++)
3736 {
3737 goal_alternative[i] = this_alternative[i];
3738 goal_alternative_win[i] = this_alternative_win[i];
3739 goal_alternative_match_win[i]
3740 = this_alternative_match_win[i];
3741 goal_alternative_offmemok[i]
3742 = this_alternative_offmemok[i];
3743 goal_alternative_matches[i] = this_alternative_matches[i];
3744 goal_alternative_earlyclobber[i]
3745 = this_alternative_earlyclobber[i];
3746 }
3747 goal_alternative_swapped = swapped;
3748 best = losers;
3749 goal_alternative_number = this_alternative_number;
3750 goal_earlyclobber = this_earlyclobber;
3751 }
3752 }
3753
3754 if (swapped)
3755 {
3756 /* If the commutative operands have been swapped, swap
3757 them back in order to check the next alternative. */
3758 recog_data.operand[commutative] = substed_operand[commutative];
3759 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3760 /* Unswap the duplicates too. */
3761 for (i = 0; i < recog_data.n_dups; i++)
3762 if (recog_data.dup_num[i] == commutative
3763 || recog_data.dup_num[i] == commutative + 1)
3764 *recog_data.dup_loc[i]
3765 = recog_data.operand[(int) recog_data.dup_num[i]];
3766
3767 /* Unswap the operand related information as well. */
3768 std::swap (preferred_class[commutative],
3769 preferred_class[commutative + 1]);
3770 std::swap (pref_or_nothing[commutative],
3771 pref_or_nothing[commutative + 1]);
3772 std::swap (address_reloaded[commutative],
3773 address_reloaded[commutative + 1]);
3774 }
3775 }
3776 }
3777
3778 /* The operands don't meet the constraints.
3779 goal_alternative describes the alternative
3780 that we could reach by reloading the fewest operands.
3781 Reload so as to fit it. */
3782
3783 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3784 {
3785 /* No alternative works with reloads?? */
3786 if (insn_code_number >= 0)
3787 fatal_insn ("unable to generate reloads for:", insn);
3788 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3789 /* Avoid further trouble with this insn. */
3790 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3791 n_reloads = 0;
3792 return 0;
3793 }
3794
3795 /* Jump to `finish' from above if all operands are valid already.
3796 In that case, goal_alternative_win is all 1. */
3797 finish:
3798
3799 /* Right now, for any pair of operands I and J that are required to match,
3800 with I < J,
3801 goal_alternative_matches[J] is I.
3802 Set up goal_alternative_matched as the inverse function:
3803 goal_alternative_matched[I] = J. */
3804
3805 for (i = 0; i < noperands; i++)
3806 goal_alternative_matched[i] = -1;
3807
3808 for (i = 0; i < noperands; i++)
3809 if (! goal_alternative_win[i]
3810 && goal_alternative_matches[i] >= 0)
3811 goal_alternative_matched[goal_alternative_matches[i]] = i;
3812
3813 for (i = 0; i < noperands; i++)
3814 goal_alternative_win[i] |= goal_alternative_match_win[i];
3815
3816 /* If the best alternative is with operands 1 and 2 swapped,
3817 consider them swapped before reporting the reloads. Update the
3818 operand numbers of any reloads already pushed. */
3819
3820 if (goal_alternative_swapped)
3821 {
3822 std::swap (substed_operand[commutative],
3823 substed_operand[commutative + 1]);
3824 std::swap (recog_data.operand[commutative],
3825 recog_data.operand[commutative + 1]);
3826 std::swap (*recog_data.operand_loc[commutative],
3827 *recog_data.operand_loc[commutative + 1]);
3828
3829 for (i = 0; i < recog_data.n_dups; i++)
3830 if (recog_data.dup_num[i] == commutative
3831 || recog_data.dup_num[i] == commutative + 1)
3832 *recog_data.dup_loc[i]
3833 = recog_data.operand[(int) recog_data.dup_num[i]];
3834
3835 for (i = 0; i < n_reloads; i++)
3836 {
3837 if (rld[i].opnum == commutative)
3838 rld[i].opnum = commutative + 1;
3839 else if (rld[i].opnum == commutative + 1)
3840 rld[i].opnum = commutative;
3841 }
3842 }
3843
3844 for (i = 0; i < noperands; i++)
3845 {
3846 operand_reloadnum[i] = -1;
3847
3848 /* If this is an earlyclobber operand, we need to widen the scope.
3849 The reload must remain valid from the start of the insn being
3850 reloaded until after the operand is stored into its destination.
3851 We approximate this with RELOAD_OTHER even though we know that we
3852 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3853
3854 One special case that is worth checking is when we have an
3855 output that is earlyclobber but isn't used past the insn (typically
3856 a SCRATCH). In this case, we only need have the reload live
3857 through the insn itself, but not for any of our input or output
3858 reloads.
3859 But we must not accidentally narrow the scope of an existing
3860 RELOAD_OTHER reload - leave these alone.
3861
3862 In any case, anything needed to address this operand can remain
3863 however they were previously categorized. */
3864
3865 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3866 operand_type[i]
3867 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3868 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3869 }
3870
3871 /* Any constants that aren't allowed and can't be reloaded
3872 into registers are here changed into memory references. */
3873 for (i = 0; i < noperands; i++)
3874 if (! goal_alternative_win[i])
3875 {
3876 rtx op = recog_data.operand[i];
3877 rtx subreg = NULL_RTX;
3878 rtx plus = NULL_RTX;
3879 machine_mode mode = operand_mode[i];
3880
3881 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3882 push_reload so we have to let them pass here. */
3883 if (GET_CODE (op) == SUBREG)
3884 {
3885 subreg = op;
3886 op = SUBREG_REG (op);
3887 mode = GET_MODE (op);
3888 }
3889
3890 if (GET_CODE (op) == PLUS)
3891 {
3892 plus = op;
3893 op = XEXP (op, 1);
3894 }
3895
3896 if (CONST_POOL_OK_P (mode, op)
3897 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3898 == NO_REGS)
3899 || no_input_reloads))
3900 {
3901 int this_address_reloaded;
3902 rtx tem = force_const_mem (mode, op);
3903
3904 /* If we stripped a SUBREG or a PLUS above add it back. */
3905 if (plus != NULL_RTX)
3906 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3907
3908 if (subreg != NULL_RTX)
3909 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3910
3911 this_address_reloaded = 0;
3912 substed_operand[i] = recog_data.operand[i]
3913 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3914 0, insn, &this_address_reloaded);
3915
3916 /* If the alternative accepts constant pool refs directly
3917 there will be no reload needed at all. */
3918 if (plus == NULL_RTX
3919 && subreg == NULL_RTX
3920 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3921 ? substed_operand[i]
3922 : NULL,
3923 recog_data.constraints[i],
3924 goal_alternative_number))
3925 goal_alternative_win[i] = 1;
3926 }
3927 }
3928
3929 /* Record the values of the earlyclobber operands for the caller. */
3930 if (goal_earlyclobber)
3931 for (i = 0; i < noperands; i++)
3932 if (goal_alternative_earlyclobber[i])
3933 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3934
3935 /* Now record reloads for all the operands that need them. */
3936 for (i = 0; i < noperands; i++)
3937 if (! goal_alternative_win[i])
3938 {
3939 /* Operands that match previous ones have already been handled. */
3940 if (goal_alternative_matches[i] >= 0)
3941 ;
3942 /* Handle an operand with a nonoffsettable address
3943 appearing where an offsettable address will do
3944 by reloading the address into a base register.
3945
3946 ??? We can also do this when the operand is a register and
3947 reg_equiv_mem is not offsettable, but this is a bit tricky,
3948 so we don't bother with it. It may not be worth doing. */
3949 else if (goal_alternative_matched[i] == -1
3950 && goal_alternative_offmemok[i]
3951 && MEM_P (recog_data.operand[i]))
3952 {
3953 /* If the address to be reloaded is a VOIDmode constant,
3954 use the default address mode as mode of the reload register,
3955 as would have been done by find_reloads_address. */
3956 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3957 machine_mode address_mode;
3958
3959 address_mode = get_address_mode (recog_data.operand[i]);
3960 operand_reloadnum[i]
3961 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3962 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3963 base_reg_class (VOIDmode, as, MEM, SCRATCH),
3964 address_mode,
3965 VOIDmode, 0, 0, i, RELOAD_OTHER);
3966 rld[operand_reloadnum[i]].inc
3967 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3968
3969 /* If this operand is an output, we will have made any
3970 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3971 now we are treating part of the operand as an input, so
3972 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
3973
3974 if (modified[i] == RELOAD_WRITE)
3975 {
3976 for (j = 0; j < n_reloads; j++)
3977 {
3978 if (rld[j].opnum == i)
3979 {
3980 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
3981 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
3982 else if (rld[j].when_needed
3983 == RELOAD_FOR_OUTADDR_ADDRESS)
3984 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
3985 }
3986 }
3987 }
3988 }
3989 else if (goal_alternative_matched[i] == -1)
3990 {
3991 operand_reloadnum[i]
3992 = push_reload ((modified[i] != RELOAD_WRITE
3993 ? recog_data.operand[i] : 0),
3994 (modified[i] != RELOAD_READ
3995 ? recog_data.operand[i] : 0),
3996 (modified[i] != RELOAD_WRITE
3997 ? recog_data.operand_loc[i] : 0),
3998 (modified[i] != RELOAD_READ
3999 ? recog_data.operand_loc[i] : 0),
4000 (enum reg_class) goal_alternative[i],
4001 (modified[i] == RELOAD_WRITE
4002 ? VOIDmode : operand_mode[i]),
4003 (modified[i] == RELOAD_READ
4004 ? VOIDmode : operand_mode[i]),
4005 (insn_code_number < 0 ? 0
4006 : insn_data[insn_code_number].operand[i].strict_low),
4007 0, i, operand_type[i]);
4008 }
4009 /* In a matching pair of operands, one must be input only
4010 and the other must be output only.
4011 Pass the input operand as IN and the other as OUT. */
4012 else if (modified[i] == RELOAD_READ
4013 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4014 {
4015 operand_reloadnum[i]
4016 = push_reload (recog_data.operand[i],
4017 recog_data.operand[goal_alternative_matched[i]],
4018 recog_data.operand_loc[i],
4019 recog_data.operand_loc[goal_alternative_matched[i]],
4020 (enum reg_class) goal_alternative[i],
4021 operand_mode[i],
4022 operand_mode[goal_alternative_matched[i]],
4023 0, 0, i, RELOAD_OTHER);
4024 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4025 }
4026 else if (modified[i] == RELOAD_WRITE
4027 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4028 {
4029 operand_reloadnum[goal_alternative_matched[i]]
4030 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4031 recog_data.operand[i],
4032 recog_data.operand_loc[goal_alternative_matched[i]],
4033 recog_data.operand_loc[i],
4034 (enum reg_class) goal_alternative[i],
4035 operand_mode[goal_alternative_matched[i]],
4036 operand_mode[i],
4037 0, 0, i, RELOAD_OTHER);
4038 operand_reloadnum[i] = output_reloadnum;
4039 }
4040 else
4041 {
4042 gcc_assert (insn_code_number < 0);
4043 error_for_asm (insn, "inconsistent operand constraints "
4044 "in an %<asm%>");
4045 /* Avoid further trouble with this insn. */
4046 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4047 n_reloads = 0;
4048 return 0;
4049 }
4050 }
4051 else if (goal_alternative_matched[i] < 0
4052 && goal_alternative_matches[i] < 0
4053 && address_operand_reloaded[i] != 1
4054 && optimize)
4055 {
4056 /* For each non-matching operand that's a MEM or a pseudo-register
4057 that didn't get a hard register, make an optional reload.
4058 This may get done even if the insn needs no reloads otherwise. */
4059
4060 rtx operand = recog_data.operand[i];
4061
4062 while (GET_CODE (operand) == SUBREG)
4063 operand = SUBREG_REG (operand);
4064 if ((MEM_P (operand)
4065 || (REG_P (operand)
4066 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4067 /* If this is only for an output, the optional reload would not
4068 actually cause us to use a register now, just note that
4069 something is stored here. */
4070 && (goal_alternative[i] != NO_REGS
4071 || modified[i] == RELOAD_WRITE)
4072 && ! no_input_reloads
4073 /* An optional output reload might allow to delete INSN later.
4074 We mustn't make in-out reloads on insns that are not permitted
4075 output reloads.
4076 If this is an asm, we can't delete it; we must not even call
4077 push_reload for an optional output reload in this case,
4078 because we can't be sure that the constraint allows a register,
4079 and push_reload verifies the constraints for asms. */
4080 && (modified[i] == RELOAD_READ
4081 || (! no_output_reloads && ! this_insn_is_asm)))
4082 operand_reloadnum[i]
4083 = push_reload ((modified[i] != RELOAD_WRITE
4084 ? recog_data.operand[i] : 0),
4085 (modified[i] != RELOAD_READ
4086 ? recog_data.operand[i] : 0),
4087 (modified[i] != RELOAD_WRITE
4088 ? recog_data.operand_loc[i] : 0),
4089 (modified[i] != RELOAD_READ
4090 ? recog_data.operand_loc[i] : 0),
4091 (enum reg_class) goal_alternative[i],
4092 (modified[i] == RELOAD_WRITE
4093 ? VOIDmode : operand_mode[i]),
4094 (modified[i] == RELOAD_READ
4095 ? VOIDmode : operand_mode[i]),
4096 (insn_code_number < 0 ? 0
4097 : insn_data[insn_code_number].operand[i].strict_low),
4098 1, i, operand_type[i]);
4099 /* If a memory reference remains (either as a MEM or a pseudo that
4100 did not get a hard register), yet we can't make an optional
4101 reload, check if this is actually a pseudo register reference;
4102 we then need to emit a USE and/or a CLOBBER so that reload
4103 inheritance will do the right thing. */
4104 else if (replace
4105 && (MEM_P (operand)
4106 || (REG_P (operand)
4107 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4108 && reg_renumber [REGNO (operand)] < 0)))
4109 {
4110 operand = *recog_data.operand_loc[i];
4111
4112 while (GET_CODE (operand) == SUBREG)
4113 operand = SUBREG_REG (operand);
4114 if (REG_P (operand))
4115 {
4116 if (modified[i] != RELOAD_WRITE)
4117 /* We mark the USE with QImode so that we recognize
4118 it as one that can be safely deleted at the end
4119 of reload. */
4120 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4121 insn), QImode);
4122 if (modified[i] != RELOAD_READ)
4123 emit_insn_after (gen_clobber (operand), insn);
4124 }
4125 }
4126 }
4127 else if (goal_alternative_matches[i] >= 0
4128 && goal_alternative_win[goal_alternative_matches[i]]
4129 && modified[i] == RELOAD_READ
4130 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4131 && ! no_input_reloads && ! no_output_reloads
4132 && optimize)
4133 {
4134 /* Similarly, make an optional reload for a pair of matching
4135 objects that are in MEM or a pseudo that didn't get a hard reg. */
4136
4137 rtx operand = recog_data.operand[i];
4138
4139 while (GET_CODE (operand) == SUBREG)
4140 operand = SUBREG_REG (operand);
4141 if ((MEM_P (operand)
4142 || (REG_P (operand)
4143 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4144 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4145 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4146 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4147 recog_data.operand[i],
4148 recog_data.operand_loc[goal_alternative_matches[i]],
4149 recog_data.operand_loc[i],
4150 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4151 operand_mode[goal_alternative_matches[i]],
4152 operand_mode[i],
4153 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4154 }
4155
4156 /* Perform whatever substitutions on the operands we are supposed
4157 to make due to commutativity or replacement of registers
4158 with equivalent constants or memory slots. */
4159
4160 for (i = 0; i < noperands; i++)
4161 {
4162 /* We only do this on the last pass through reload, because it is
4163 possible for some data (like reg_equiv_address) to be changed during
4164 later passes. Moreover, we lose the opportunity to get a useful
4165 reload_{in,out}_reg when we do these replacements. */
4166
4167 if (replace)
4168 {
4169 rtx substitution = substed_operand[i];
4170
4171 *recog_data.operand_loc[i] = substitution;
4172
4173 /* If we're replacing an operand with a LABEL_REF, we need to
4174 make sure that there's a REG_LABEL_OPERAND note attached to
4175 this instruction. */
4176 if (GET_CODE (substitution) == LABEL_REF
4177 && !find_reg_note (insn, REG_LABEL_OPERAND,
4178 label_ref_label (substitution))
4179 /* For a JUMP_P, if it was a branch target it must have
4180 already been recorded as such. */
4181 && (!JUMP_P (insn)
4182 || !label_is_jump_target_p (label_ref_label (substitution),
4183 insn)))
4184 {
4185 add_reg_note (insn, REG_LABEL_OPERAND,
4186 label_ref_label (substitution));
4187 if (LABEL_P (label_ref_label (substitution)))
4188 ++LABEL_NUSES (label_ref_label (substitution));
4189 }
4190
4191 }
4192 else
4193 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4194 }
4195
4196 /* If this insn pattern contains any MATCH_DUP's, make sure that
4197 they will be substituted if the operands they match are substituted.
4198 Also do now any substitutions we already did on the operands.
4199
4200 Don't do this if we aren't making replacements because we might be
4201 propagating things allocated by frame pointer elimination into places
4202 it doesn't expect. */
4203
4204 if (insn_code_number >= 0 && replace)
4205 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4206 {
4207 int opno = recog_data.dup_num[i];
4208 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4209 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4210 }
4211
4212#if 0
4213 /* This loses because reloading of prior insns can invalidate the equivalence
4214 (or at least find_equiv_reg isn't smart enough to find it any more),
4215 causing this insn to need more reload regs than it needed before.
4216 It may be too late to make the reload regs available.
4217 Now this optimization is done safely in choose_reload_regs. */
4218
4219 /* For each reload of a reg into some other class of reg,
4220 search for an existing equivalent reg (same value now) in the right class.
4221 We can use it as long as we don't need to change its contents. */
4222 for (i = 0; i < n_reloads; i++)
4223 if (rld[i].reg_rtx == 0
4224 && rld[i].in != 0
4225 && REG_P (rld[i].in)
4226 && rld[i].out == 0)
4227 {
4228 rld[i].reg_rtx
4229 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4230 static_reload_reg_p, 0, rld[i].inmode);
4231 /* Prevent generation of insn to load the value
4232 because the one we found already has the value. */
4233 if (rld[i].reg_rtx)
4234 rld[i].in = rld[i].reg_rtx;
4235 }
4236#endif
4237
4238 /* If we detected error and replaced asm instruction by USE, forget about the
4239 reloads. */
4240 if (GET_CODE (PATTERN (insn)) == USE
4241 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4242 n_reloads = 0;
4243
4244 /* Perhaps an output reload can be combined with another
4245 to reduce needs by one. */
4246 if (!goal_earlyclobber)
4247 combine_reloads ();
4248
4249 /* If we have a pair of reloads for parts of an address, they are reloading
4250 the same object, the operands themselves were not reloaded, and they
4251 are for two operands that are supposed to match, merge the reloads and
4252 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4253
4254 for (i = 0; i < n_reloads; i++)
4255 {
4256 int k;
4257
4258 for (j = i + 1; j < n_reloads; j++)
4259 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4260 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4261 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4262 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4263 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4264 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4265 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4266 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4267 && rtx_equal_p (rld[i].in, rld[j].in)
4268 && (operand_reloadnum[rld[i].opnum] < 0
4269 || rld[operand_reloadnum[rld[i].opnum]].optional)
4270 && (operand_reloadnum[rld[j].opnum] < 0
4271 || rld[operand_reloadnum[rld[j].opnum]].optional)
4272 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4273 || (goal_alternative_matches[rld[j].opnum]
4274 == rld[i].opnum)))
4275 {
4276 for (k = 0; k < n_replacements; k++)
4277 if (replacements[k].what == j)
4278 replacements[k].what = i;
4279
4280 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4281 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4282 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4283 else
4284 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4285 rld[j].in = 0;
4286 }
4287 }
4288
4289 /* Scan all the reloads and update their type.
4290 If a reload is for the address of an operand and we didn't reload
4291 that operand, change the type. Similarly, change the operand number
4292 of a reload when two operands match. If a reload is optional, treat it
4293 as though the operand isn't reloaded.
4294
4295 ??? This latter case is somewhat odd because if we do the optional
4296 reload, it means the object is hanging around. Thus we need only
4297 do the address reload if the optional reload was NOT done.
4298
4299 Change secondary reloads to be the address type of their operand, not
4300 the normal type.
4301
4302 If an operand's reload is now RELOAD_OTHER, change any
4303 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4304 RELOAD_FOR_OTHER_ADDRESS. */
4305
4306 for (i = 0; i < n_reloads; i++)
4307 {
4308 if (rld[i].secondary_p
4309 && rld[i].when_needed == operand_type[rld[i].opnum])
4310 rld[i].when_needed = address_type[rld[i].opnum];
4311
4312 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4313 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4314 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4315 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4316 && (operand_reloadnum[rld[i].opnum] < 0
4317 || rld[operand_reloadnum[rld[i].opnum]].optional))
4318 {
4319 /* If we have a secondary reload to go along with this reload,
4320 change its type to RELOAD_FOR_OPADDR_ADDR. */
4321
4322 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4323 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4324 && rld[i].secondary_in_reload != -1)
4325 {
4326 int secondary_in_reload = rld[i].secondary_in_reload;
4327
4328 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4329
4330 /* If there's a tertiary reload we have to change it also. */
4331 if (secondary_in_reload > 0
4332 && rld[secondary_in_reload].secondary_in_reload != -1)
4333 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4334 = RELOAD_FOR_OPADDR_ADDR;
4335 }
4336
4337 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4338 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4339 && rld[i].secondary_out_reload != -1)
4340 {
4341 int secondary_out_reload = rld[i].secondary_out_reload;
4342
4343 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4344
4345 /* If there's a tertiary reload we have to change it also. */
4346 if (secondary_out_reload
4347 && rld[secondary_out_reload].secondary_out_reload != -1)
4348 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4349 = RELOAD_FOR_OPADDR_ADDR;
4350 }
4351
4352 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4353 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4354 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4355 else
4356 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4357 }
4358
4359 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4360 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4361 && operand_reloadnum[rld[i].opnum] >= 0
4362 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4363 == RELOAD_OTHER))
4364 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4365
4366 if (goal_alternative_matches[rld[i].opnum] >= 0)
4367 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4368 }
4369
4370 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4371 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4372 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4373
4374 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4375 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4376 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4377 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4378 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4379 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4380 This is complicated by the fact that a single operand can have more
4381 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4382 choose_reload_regs without affecting code quality, and cases that
4383 actually fail are extremely rare, so it turns out to be better to fix
4384 the problem here by not generating cases that choose_reload_regs will
4385 fail for. */
4386 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4387 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4388 a single operand.
4389 We can reduce the register pressure by exploiting that a
4390 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4391 does not conflict with any of them, if it is only used for the first of
4392 the RELOAD_FOR_X_ADDRESS reloads. */
4393 {
4394 int first_op_addr_num = -2;
4395 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4396 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4397 int need_change = 0;
4398 /* We use last_op_addr_reload and the contents of the above arrays
4399 first as flags - -2 means no instance encountered, -1 means exactly
4400 one instance encountered.
4401 If more than one instance has been encountered, we store the reload
4402 number of the first reload of the kind in question; reload numbers
4403 are known to be non-negative. */
4404 for (i = 0; i < noperands; i++)
4405 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4406 for (i = n_reloads - 1; i >= 0; i--)
4407 {
4408 switch (rld[i].when_needed)
4409 {
4410 case RELOAD_FOR_OPERAND_ADDRESS:
4411 if (++first_op_addr_num >= 0)
4412 {
4413 first_op_addr_num =