1/* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2024 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
37#include "backend.h"
38#include "target.h"
39#include "rtl.h"
40#include "tree.h"
41#include "gimple-expr.h"
42#include "cfghooks.h"
43#include "df.h"
44#include "memmodel.h"
45#include "tm_p.h"
46#include "stringpool.h"
47#include "expmed.h"
48#include "optabs.h"
49#include "opts.h"
50#include "regs.h"
51#include "emit-rtl.h"
52#include "recog.h"
53#include "rtl-error.h"
54#include "hard-reg-set.h"
55#include "alias.h"
56#include "fold-const.h"
57#include "stor-layout.h"
58#include "varasm.h"
59#include "except.h"
60#include "dojump.h"
61#include "explow.h"
62#include "calls.h"
63#include "expr.h"
64#include "optabs-tree.h"
65#include "output.h"
66#include "langhooks.h"
67#include "common/common-target.h"
68#include "gimplify.h"
69#include "tree-pass.h"
70#include "cfgrtl.h"
71#include "cfganal.h"
72#include "cfgbuild.h"
73#include "cfgcleanup.h"
74#include "cfgexpand.h"
75#include "shrink-wrap.h"
76#include "toplev.h"
77#include "rtl-iter.h"
78#include "tree-dfa.h"
79#include "tree-ssa.h"
80#include "stringpool.h"
81#include "attribs.h"
82#include "gimple.h"
83#include "options.h"
84#include "function-abi.h"
85#include "value-range.h"
86#include "gimple-range.h"
87#include "insn-attr.h"
88
89/* So we can assign to cfun in this file. */
90#undef cfun
91
92#ifndef STACK_ALIGNMENT_NEEDED
93#define STACK_ALIGNMENT_NEEDED 1
94#endif
95
96#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
97
98/* Round a value down to the largest multiple of the required alignment
99 that does not exceed it. Avoid using division in case the value is
100 negative. Assume the alignment is a power of two. */
101#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
102
103/* Similar, but round the value up to the next multiple of the
104 alignment. */
105#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
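
/* Illustrative sketch, not GCC code: shows how the two rounding macros
   above behave when ALIGN is a power of two.  The helper function and its
   name are purely for illustration.  */
static void
example_round_to_alignment (void)
{
  /* CEIL_ROUND rounds up to the next multiple of the alignment.  */
  int up = CEIL_ROUND (13, 8);      /* up == 16 */
  /* FLOOR_ROUND rounds down, and the mask trick (instead of division)
     also works for negative values.  */
  int down = FLOOR_ROUND (-13, 8);  /* down == -16 */
  (void) up; (void) down;
}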
106
107/* Nonzero once virtual register instantiation has been done.
108 assign_stack_local uses frame_pointer_rtx when this is nonzero.
109 calls.cc:emit_library_call_value_1 uses it to set up
110 post-instantiation libcalls. */
111int virtuals_instantiated;
112
113/* Assign unique numbers to labels generated for profiling, debugging, etc. */
114static GTY(()) int funcdef_no;
115
116/* This variable holds a pointer to a function that creates the
117 target-specific, per-function data structures. */
118struct machine_function * (*init_machine_status) (void);
119
120/* The currently compiled function. */
121struct function *cfun = 0;
122
123/* These hashes record the prologue and epilogue insns. */
124
125struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
126{
127 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
128 static bool equal (rtx a, rtx b) { return a == b; }
129};
130
131static GTY((cache))
132 hash_table<insn_cache_hasher> *prologue_insn_hash;
133static GTY((cache))
134 hash_table<insn_cache_hasher> *epilogue_insn_hash;
135
136
137hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
138vec<tree, va_gc> *types_used_by_cur_var_decl;
139
140/* Forward declarations. */
141
142static class temp_slot *find_temp_slot_from_address (rtx);
143static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
144static void pad_below (struct args_size *, machine_mode, tree);
145static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
146static int all_blocks (tree, tree *);
147static tree *get_block_vector (tree, int *);
148extern tree debug_find_var_in_block_tree (tree, tree);
149/* We always define `record_insns' even if it's not used so that we
150 can always export `prologue_epilogue_contains'. */
151static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
152 ATTRIBUTE_UNUSED;
153static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
154static void prepare_function_start (void);
155static void do_clobber_return_reg (rtx, void *);
156static void do_use_return_reg (rtx, void *);
157
158
159/* Stack of nested functions, used to keep track of the cfun stack. */
161
162static vec<function *> function_context_stack;
163
164/* Save the current context for compilation of a nested function.
165 This is called from language-specific code. */
166
167void
168push_function_context (void)
169{
170 if (cfun == 0)
171 allocate_struct_function (NULL, false);
172
173 function_context_stack.safe_push (obj: cfun);
174 set_cfun (NULL);
175}
176
177/* Restore the last saved context, at the end of a nested function.
178 This function is called from language-specific code. */
179
180void
181pop_function_context (void)
182{
183 struct function *p = function_context_stack.pop ();
184 set_cfun (new_cfun: p);
185 current_function_decl = p->decl;
186
187 /* Reset variables that have known state during rtx generation. */
188 virtuals_instantiated = 0;
189 generating_concat_p = 1;
190}
191
192/* Clear out all parts of the state in F that can safely be discarded
193 after the function has been parsed, but not compiled, to let
194 garbage collection reclaim the memory. */
195
196void
197free_after_parsing (struct function *f)
198{
199 f->language = 0;
200}
201
202/* Clear out all parts of the state in F that can safely be discarded
203 after the function has been compiled, to let garbage collection
204 reclaim the memory. */
205
206void
207free_after_compilation (struct function *f)
208{
209 prologue_insn_hash = NULL;
210 epilogue_insn_hash = NULL;
211
212 free (crtl->emit.regno_pointer_align);
213
214 memset (crtl, c: 0, n: sizeof (struct rtl_data));
215 f->eh = NULL;
216 f->machine = NULL;
217 f->cfg = NULL;
218 f->curr_properties &= ~PROP_cfg;
219 delete f->cond_uids;
220
221 regno_reg_rtx = NULL;
222}
223
224/* Return size needed for stack frame based on slots so far allocated.
225 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
226 the caller may have to do that. */
227
228poly_int64
229get_frame_size (void)
230{
231 if (FRAME_GROWS_DOWNWARD)
232 return -frame_offset;
233 else
234 return frame_offset;
235}
236
237/* Issue an error message and return TRUE if frame OFFSET overflows in
238 the signed target pointer arithmetic for function FUNC. Otherwise
239 return FALSE. */
240
241bool
242frame_offset_overflow (poly_int64 offset, tree func)
243{
244 poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
245 unsigned HOST_WIDE_INT limit
246 = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
247 /* Leave room for the fixed part of the frame. */
248 - 64 * UNITS_PER_WORD);
249
250 if (!coeffs_in_range_p (a: size, b: 0U, c: limit))
251 {
252 unsigned HOST_WIDE_INT hwisize;
253 if (size.is_constant (const_value: &hwisize))
254 error_at (DECL_SOURCE_LOCATION (func),
255 "total size of local objects %wu exceeds maximum %wu",
256 hwisize, limit);
257 else
258 error_at (DECL_SOURCE_LOCATION (func),
259 "total size of local objects exceeds maximum %wu",
260 limit);
261 return true;
262 }
263
264 return false;
265}
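
/* Illustrative sketch, not GCC code: for a hypothetical 32-bit pointer
   target with 4-byte words, the overflow limit computed above works out as
   shown below.  The two locals stand in for GET_MODE_BITSIZE (Pmode) and
   UNITS_PER_WORD.  */
static unsigned long
example_frame_size_limit (void)
{
  unsigned long pmode_bits = 32;
  unsigned long units_per_word = 4;
  /* 2^31 - 64 * 4 = 2147483648 - 256 = 2147483392 bytes.  */
  return (1UL << (pmode_bits - 1)) - 64 * units_per_word;
}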
266
267/* Return the minimum spill slot alignment for a register of mode MODE. */
268
269unsigned int
270spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
271{
272 return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
273}
274
275/* Return stack slot alignment in bits for TYPE and MODE. */
276
277static unsigned int
278get_stack_local_alignment (tree type, machine_mode mode)
279{
280 unsigned int alignment;
281
282 if (mode == BLKmode)
283 alignment = BIGGEST_ALIGNMENT;
284 else
285 alignment = GET_MODE_ALIGNMENT (mode);
286
287 /* Allow the front end to (possibly) increase the alignment of this
288 stack slot. */
289 if (! type)
290 type = lang_hooks.types.type_for_mode (mode, 0);
291
292 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
293}
294
295/* Determine whether it is possible to fit a stack slot of size SIZE and
296 alignment ALIGNMENT into an area in the stack frame that starts at
297 frame offset START and has a length of LENGTH. If so, store the frame
298 offset to be used for the stack slot in *POFFSET and return true;
299 return false otherwise. This function will extend the frame size when
300 given a start/length pair that lies at the end of the frame. */
301
302static bool
303try_fit_stack_local (poly_int64 start, poly_int64 length,
304 poly_int64 size, unsigned int alignment,
305 poly_int64 *poffset)
306{
307 poly_int64 this_frame_offset;
308 int frame_off, frame_alignment, frame_phase;
309
310 /* Calculate how many bytes the start of local variables is off from
311 stack alignment. */
312 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
313 frame_off = targetm.starting_frame_offset () % frame_alignment;
314 frame_phase = frame_off ? frame_alignment - frame_off : 0;
315
316 /* Round the frame offset to the specified alignment. */
317
318 if (FRAME_GROWS_DOWNWARD)
319 this_frame_offset
320 = (aligned_lower_bound (value: start + length - size - frame_phase, align: alignment)
321 + frame_phase);
322 else
323 this_frame_offset
324 = aligned_upper_bound (value: start - frame_phase, align: alignment) + frame_phase;
325
326 /* See if it fits. If this space is at the edge of the frame,
327 consider extending the frame to make it fit. Our caller relies on
328 this when allocating a new slot. */
329 if (maybe_lt (a: this_frame_offset, b: start))
330 {
331 if (known_eq (frame_offset, start))
332 frame_offset = this_frame_offset;
333 else
334 return false;
335 }
336 else if (maybe_gt (this_frame_offset + size, start + length))
337 {
338 if (known_eq (frame_offset, start + length))
339 frame_offset = this_frame_offset + size;
340 else
341 return false;
342 }
343
344 *poffset = this_frame_offset;
345 return true;
346}
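
/* Illustrative sketch, not GCC code: the FRAME_GROWS_DOWNWARD branch above
   with plain longs standing in for poly_int64 and a power-of-two ALIGNMENT,
   so aligned_lower_bound becomes a simple mask.  */
static long
example_fit_downward_frame (long start, long length, long size,
                            long alignment, long frame_phase)
{
  long candidate = start + length - size - frame_phase;
  candidate &= -alignment;   /* round down to a multiple of ALIGNMENT */
  return candidate + frame_phase;
  /* E.g. start = -64, length = 32, size = 12, alignment = 8, phase = 0
     gives -44 & -8 = -48, so the slot would occupy bytes [-48, -36).  */
}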
347
348/* Create a new frame_space structure describing free space in the stack
349 frame beginning at START and ending at END, and chain it into the
350 function's frame_space_list. */
351
352static void
353add_frame_space (poly_int64 start, poly_int64 end)
354{
355 class frame_space *space = ggc_alloc<frame_space> ();
356 space->next = crtl->frame_space_list;
357 crtl->frame_space_list = space;
358 space->start = start;
359 space->length = end - start;
360}
361
362/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
363 with machine mode MODE.
364
365 ALIGN controls the amount of alignment for the address of the slot:
366 0 means according to MODE,
367 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
368 -2 means use BITS_PER_UNIT,
369 positive specifies alignment boundary in bits.
370
371 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
372 alignment and ASLK_RECORD_PAD bit set if we should remember
373 extra space we allocated for alignment purposes. When we are
374 called from assign_stack_temp_for_type, it is not set so we don't
375 track the same stack slot in two independent lists.
376
377 We do not round to stack_boundary here. */
378
379rtx
380assign_stack_local_1 (machine_mode mode, poly_int64 size,
381 int align, int kind)
382{
383 rtx x, addr;
384 poly_int64 bigend_correction = 0;
385 poly_int64 slot_offset = 0, old_frame_offset;
386 unsigned int alignment, alignment_in_bits;
387
388 if (align == 0)
389 {
390 alignment = get_stack_local_alignment (NULL, mode);
391 alignment /= BITS_PER_UNIT;
392 }
393 else if (align == -1)
394 {
395 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
396 size = aligned_upper_bound (value: size, align: alignment);
397 }
398 else if (align == -2)
399 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
400 else
401 alignment = align / BITS_PER_UNIT;
402
403 alignment_in_bits = alignment * BITS_PER_UNIT;
404
405 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
406 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
407 {
408 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
409 alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
410 }
411
412 if (SUPPORTS_STACK_ALIGNMENT)
413 {
414 if (crtl->stack_alignment_estimated < alignment_in_bits)
415 {
416 if (!crtl->stack_realign_processed)
417 crtl->stack_alignment_estimated = alignment_in_bits;
418 else
419 {
420 /* If stack is realigned and stack alignment value
421 hasn't been finalized, it is OK not to increase
422 stack_alignment_estimated. The bigger alignment
423 requirement is recorded in stack_alignment_needed
424 below. */
425 gcc_assert (!crtl->stack_realign_finalized);
426 if (!crtl->stack_realign_needed)
427 {
428 /* It is OK to reduce the alignment as long as the
429 requested size is 0 or the estimated stack
430 alignment >= mode alignment. */
431 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
432 || known_eq (size, 0)
433 || (crtl->stack_alignment_estimated
434 >= GET_MODE_ALIGNMENT (mode)));
435 alignment_in_bits = crtl->stack_alignment_estimated;
436 alignment = alignment_in_bits / BITS_PER_UNIT;
437 }
438 }
439 }
440 }
441
442 if (crtl->stack_alignment_needed < alignment_in_bits)
443 crtl->stack_alignment_needed = alignment_in_bits;
444 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
445 crtl->max_used_stack_slot_alignment = alignment_in_bits;
446
447 if (mode != BLKmode || maybe_ne (a: size, b: 0))
448 {
449 if (kind & ASLK_RECORD_PAD)
450 {
451 class frame_space **psp;
452
453 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
454 {
455 class frame_space *space = *psp;
456 if (!try_fit_stack_local (start: space->start, length: space->length, size,
457 alignment, poffset: &slot_offset))
458 continue;
459 *psp = space->next;
460 if (known_gt (slot_offset, space->start))
461 add_frame_space (start: space->start, end: slot_offset);
462 if (known_lt (slot_offset + size, space->start + space->length))
463 add_frame_space (start: slot_offset + size,
464 end: space->start + space->length);
465 goto found_space;
466 }
467 }
468 }
469 else if (!STACK_ALIGNMENT_NEEDED)
470 {
471 slot_offset = frame_offset;
472 goto found_space;
473 }
474
475 old_frame_offset = frame_offset;
476
477 if (FRAME_GROWS_DOWNWARD)
478 {
479 frame_offset -= size;
480 try_fit_stack_local (frame_offset, length: size, size, alignment, poffset: &slot_offset);
481
482 if (kind & ASLK_RECORD_PAD)
483 {
484 if (known_gt (slot_offset, frame_offset))
485 add_frame_space (frame_offset, end: slot_offset);
486 if (known_lt (slot_offset + size, old_frame_offset))
487 add_frame_space (start: slot_offset + size, end: old_frame_offset);
488 }
489 }
490 else
491 {
492 frame_offset += size;
493 try_fit_stack_local (start: old_frame_offset, length: size, size, alignment, poffset: &slot_offset);
494
495 if (kind & ASLK_RECORD_PAD)
496 {
497 if (known_gt (slot_offset, old_frame_offset))
498 add_frame_space (start: old_frame_offset, end: slot_offset);
499 if (known_lt (slot_offset + size, frame_offset))
500 add_frame_space (start: slot_offset + size, frame_offset);
501 }
502 }
503
504 found_space:
505 /* On a big-endian machine, if we are allocating more space than we will use,
506 use the least significant bytes of those that are allocated. */
507 if (mode != BLKmode)
508 {
509 /* The slot size can sometimes be smaller than the mode size;
510 e.g. the rs6000 port allocates slots with a vector mode
511 that have the size of only one element. However, the slot
512 size must always be ordered wrt to the mode size, in the
513 same way as for a subreg. */
514 gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
515 if (BYTES_BIG_ENDIAN && maybe_lt (a: GET_MODE_SIZE (mode), b: size))
516 bigend_correction = size - GET_MODE_SIZE (mode);
517 }
518
519 /* If we have already instantiated virtual registers, return the actual
520 address relative to the frame pointer. */
521 if (virtuals_instantiated)
522 addr = plus_constant (Pmode, frame_pointer_rtx,
523 trunc_int_for_mode
524 (slot_offset + bigend_correction
525 + targetm.starting_frame_offset (), Pmode));
526 else
527 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
528 trunc_int_for_mode
529 (slot_offset + bigend_correction,
530 Pmode));
531
532 x = gen_rtx_MEM (mode, addr);
533 set_mem_align (x, alignment_in_bits);
534 MEM_NOTRAP_P (x) = 1;
535
536 vec_safe_push (stack_slot_list, obj: x);
537
538 if (frame_offset_overflow (frame_offset, func: current_function_decl))
539 frame_offset = 0;
540
541 return x;
542}
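
/* Illustrative sketch, not GCC code: how the ALIGN convention documented
   before assign_stack_local_1 (0, -1, -2 or a positive bit boundary) maps
   to an alignment in bytes.  The *_bits parameters stand in for
   GET_MODE_ALIGNMENT and BIGGEST_ALIGNMENT.  */
static unsigned int
example_decode_align_request (int align, unsigned int mode_align_bits,
                              unsigned int biggest_align_bits)
{
  const unsigned int bits_per_unit = 8;
  if (align == 0)
    return mode_align_bits / bits_per_unit;     /* according to MODE */
  else if (align == -1)
    return biggest_align_bits / bits_per_unit;  /* BIGGEST_ALIGNMENT */
  else if (align == -2)
    return 1;                                   /* BITS_PER_UNIT */
  else
    return align / bits_per_unit;               /* explicit boundary in bits */
}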
543
544/* Wrap up assign_stack_local_1 with the KIND argument set to ASLK_RECORD_PAD. */
545
546rtx
547assign_stack_local (machine_mode mode, poly_int64 size, int align)
548{
549 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
550}
551
552/* In order to evaluate some expressions, such as function calls returning
553 structures in memory, we need to temporarily allocate stack locations.
554 We record each allocated temporary in the following structure.
555
556 Associated with each temporary slot is a nesting level. When we pop up
557 one level, all temporaries associated with the previous level are freed.
558 Normally, all temporaries are freed after the execution of the statement
559 in which they were created. However, if we are inside a ({...}) grouping,
560 the result may be in a temporary and hence must be preserved. If the
561 result could be in a temporary, we preserve it if we can determine which
562 one it is in. If we cannot determine which temporary may contain the
563 result, all temporaries are preserved. A temporary is preserved by
564 pretending it was allocated at the previous nesting level. */
565
566class GTY(()) temp_slot {
567public:
568 /* Points to next temporary slot. */
569 class temp_slot *next;
570 /* Points to previous temporary slot. */
571 class temp_slot *prev;
572 /* The rtx used to reference the slot. */
573 rtx slot;
574 /* The size, in units, of the slot. */
575 poly_int64 size;
576 /* The type of the object in the slot, or zero if it doesn't correspond
577 to a type. We use this to determine whether a slot can be reused.
578 It can be reused if objects of the type of the new slot will always
579 conflict with objects of the type of the old slot. */
580 tree type;
581 /* The alignment (in bits) of the slot. */
582 unsigned int align;
583 /* True if this temporary is currently in use. */
584 bool in_use;
585 /* Nesting level at which this slot is being used. */
586 int level;
587 /* The offset of the slot from the frame_pointer, including extra space
588 for alignment. This info is for combine_temp_slots. */
589 poly_int64 base_offset;
590 /* The size of the slot, including extra space for alignment. This
591 info is for combine_temp_slots. */
592 poly_int64 full_size;
593};
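
/* Illustrative sketch, not GCC code: the nesting-level discipline described
   above in miniature.  Popping a level frees the slots at that level, and
   "preserving" a slot simply reassigns it to the enclosing level so it
   survives the next pop.  */
struct example_temp
{
  int level;
  bool in_use;
};

static void
example_pop_level (struct example_temp *slots, int nslots, int *cur_level)
{
  for (int i = 0; i < nslots; i++)
    if (slots[i].in_use && slots[i].level == *cur_level)
      slots[i].in_use = false;       /* freed, available for reuse */
  (*cur_level)--;
}

static void
example_preserve (struct example_temp *slot, int cur_level)
{
  if (slot->level == cur_level)
    slot->level = cur_level - 1;     /* pretend it belongs to the caller */
}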
594
595/* Entry for the below hash table. */
596struct GTY((for_user)) temp_slot_address_entry {
597 hashval_t hash;
598 rtx address;
599 class temp_slot *temp_slot;
600};
601
602struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
603{
604 static hashval_t hash (temp_slot_address_entry *);
605 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
606};
607
608/* A table of addresses that represent a stack slot. The table is a mapping
609 from address RTXen to a temp slot. */
610static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
611static size_t n_temp_slots_in_use;
612
613/* Removes temporary slot TEMP from LIST. */
614
615static void
616cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
617{
618 if (temp->next)
619 temp->next->prev = temp->prev;
620 if (temp->prev)
621 temp->prev->next = temp->next;
622 else
623 *list = temp->next;
624
625 temp->prev = temp->next = NULL;
626}
627
628/* Inserts temporary slot TEMP to LIST. */
629
630static void
631insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
632{
633 temp->next = *list;
634 if (*list)
635 (*list)->prev = temp;
636 temp->prev = NULL;
637 *list = temp;
638}
639
640/* Returns the list of used temp slots at LEVEL. */
641
642static class temp_slot **
643temp_slots_at_level (int level)
644{
645 if (level >= (int) vec_safe_length (used_temp_slots))
646 vec_safe_grow_cleared (used_temp_slots, len: level + 1, exact: true);
647
648 return &(*used_temp_slots)[level];
649}
650
651/* Returns the maximal temporary slot level. */
652
653static int
654max_slot_level (void)
655{
656 if (!used_temp_slots)
657 return -1;
658
659 return used_temp_slots->length () - 1;
660}
661
662/* Moves temporary slot TEMP to LEVEL. */
663
664static void
665move_slot_to_level (class temp_slot *temp, int level)
666{
667 cut_slot_from_list (temp, list: temp_slots_at_level (level: temp->level));
668 insert_slot_to_list (temp, list: temp_slots_at_level (level));
669 temp->level = level;
670}
671
672/* Make temporary slot TEMP available. */
673
674static void
675make_slot_available (class temp_slot *temp)
676{
677 cut_slot_from_list (temp, list: temp_slots_at_level (level: temp->level));
678 insert_slot_to_list (temp, list: &avail_temp_slots);
679 temp->in_use = false;
680 temp->level = -1;
681 n_temp_slots_in_use--;
682}
683
684/* Compute the hash value for an address -> temp slot mapping.
685 The value is cached on the mapping entry. */
686static hashval_t
687temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
688{
689 int do_not_record = 0;
690 return hash_rtx (t->address, GET_MODE (t->address),
691 &do_not_record, NULL, false);
692}
693
694/* Return the hash value for an address -> temp slot mapping. */
695hashval_t
696temp_address_hasher::hash (temp_slot_address_entry *t)
697{
698 return t->hash;
699}
700
701/* Compare two address -> temp slot mapping entries. */
702bool
703temp_address_hasher::equal (temp_slot_address_entry *t1,
704 temp_slot_address_entry *t2)
705{
706 return exp_equiv_p (t1->address, t2->address, 0, true);
707}
708
709/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
710static void
711insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
712{
713 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
714 t->address = copy_rtx (address);
715 t->temp_slot = temp_slot;
716 t->hash = temp_slot_address_compute_hash (t);
717 *temp_slot_address_table->find_slot_with_hash (comparable: t, hash: t->hash, insert: INSERT) = t;
718}
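
/* Illustrative sketch, not GCC code: the cached-hash pattern used by
   temp_slot_address_entry.  The hash is computed once when the entry is
   created and stored in it, so later lookups and table resizes never have
   to recompute an expensive hash (hash_rtx in the real code).  */
struct example_entry
{
  unsigned int hash;   /* cached at creation time */
  const char *key;
  void *value;
};

static unsigned int
example_hash_string (const char *s)
{
  unsigned int h = 5381;
  while (*s)
    h = h * 33 + (unsigned char) *s++;
  return h;
}

static void
example_init_entry (struct example_entry *e, const char *key, void *value)
{
  e->key = key;
  e->value = value;
  e->hash = example_hash_string (key);   /* computed exactly once */
}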
719
720/* Remove an address -> temp slot mapping entry if the temp slot is
721 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
722int
723remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
724{
725 const struct temp_slot_address_entry *t = *slot;
726 if (! t->temp_slot->in_use)
727 temp_slot_address_table->clear_slot (slot);
728 return 1;
729}
730
731/* Remove all mappings of addresses to unused temp slots. */
732static void
733remove_unused_temp_slot_addresses (void)
734{
735 /* Use quicker clearing if there aren't any active temp slots. */
736 if (n_temp_slots_in_use)
737 temp_slot_address_table->traverse
738 <void *, remove_unused_temp_slot_addresses_1> (NULL);
739 else
740 temp_slot_address_table->empty ();
741}
742
743/* Find the temp slot corresponding to the object at address X. */
744
745static class temp_slot *
746find_temp_slot_from_address (rtx x)
747{
748 class temp_slot *p;
749 struct temp_slot_address_entry tmp, *t;
750
751 /* First try the easy way:
752 See if X exists in the address -> temp slot mapping. */
753 tmp.address = x;
754 tmp.temp_slot = NULL;
755 tmp.hash = temp_slot_address_compute_hash (t: &tmp);
756 t = temp_slot_address_table->find_with_hash (comparable: &tmp, hash: tmp.hash);
757 if (t)
758 return t->temp_slot;
759
760 /* If we have a sum involving a register, see if it points to a temp
761 slot. */
762 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
763 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
764 return p;
765 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
766 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
767 return p;
768
769 /* Last resort: Address is a virtual stack var address. */
770 poly_int64 offset;
771 if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
772 {
773 int i;
774 for (i = max_slot_level (); i >= 0; i--)
775 for (p = *temp_slots_at_level (level: i); p; p = p->next)
776 if (known_in_range_p (val: offset, pos: p->base_offset, size: p->full_size))
777 return p;
778 }
779
780 return NULL;
781}
782
783/* Allocate a temporary stack slot and record it for possible later
784 reuse.
785
786 MODE is the machine mode to be given to the returned rtx.
787
788 SIZE is the size in units of the space required. We do no rounding here
789 since assign_stack_local will do any required rounding.
790
791 TYPE is the type that will be used for the stack slot. */
792
793rtx
794assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
795{
796 unsigned int align;
797 class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
798 rtx slot;
799
800 gcc_assert (known_size_p (size));
801
802 align = get_stack_local_alignment (type, mode);
803
804 /* Try to find an available, already-allocated temporary of the proper
805 mode which meets the size and alignment requirements. Choose the
806 smallest one with the closest alignment.
807
808 If assign_stack_temp is called outside of the tree->rtl expansion,
809 we cannot reuse the stack slots (that may still refer to
810 VIRTUAL_STACK_VARS_REGNUM). */
811 if (!virtuals_instantiated)
812 {
813 for (p = avail_temp_slots; p; p = p->next)
814 {
815 if (p->align >= align
816 && known_ge (p->size, size)
817 && GET_MODE (p->slot) == mode
818 && objects_must_conflict_p (p->type, type)
819 && (best_p == 0
820 || (known_eq (best_p->size, p->size)
821 ? best_p->align > p->align
822 : known_ge (best_p->size, p->size))))
823 {
824 if (p->align == align && known_eq (p->size, size))
825 {
826 selected = p;
827 cut_slot_from_list (temp: selected, list: &avail_temp_slots);
828 best_p = 0;
829 break;
830 }
831 best_p = p;
832 }
833 }
834 }
835
836 /* Make our best, if any, the one to use. */
837 if (best_p)
838 {
839 selected = best_p;
840 cut_slot_from_list (temp: selected, list: &avail_temp_slots);
841
842 /* If there are enough aligned bytes left over, make them into a new
843 temp_slot so that the extra bytes don't get wasted. Do this only
844 for BLKmode slots, so that we can be sure of the alignment. */
845 if (GET_MODE (best_p->slot) == BLKmode)
846 {
847 int alignment = best_p->align / BITS_PER_UNIT;
848 poly_int64 rounded_size = aligned_upper_bound (value: size, align: alignment);
849
850 if (known_ge (best_p->size - rounded_size, alignment))
851 {
852 p = ggc_alloc<temp_slot> ();
853 p->in_use = false;
854 p->size = best_p->size - rounded_size;
855 p->base_offset = best_p->base_offset + rounded_size;
856 p->full_size = best_p->full_size - rounded_size;
857 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
858 p->align = best_p->align;
859 p->type = best_p->type;
860 insert_slot_to_list (temp: p, list: &avail_temp_slots);
861
862 vec_safe_push (stack_slot_list, obj: p->slot);
863
864 best_p->size = rounded_size;
865 best_p->full_size = rounded_size;
866 }
867 }
868 }
869
870 /* If we still didn't find one, make a new temporary. */
871 if (selected == 0)
872 {
873 poly_int64 frame_offset_old = frame_offset;
874
875 p = ggc_alloc<temp_slot> ();
876
877 /* We are passing an explicit alignment request to assign_stack_local.
878 One side effect of that is assign_stack_local will not round SIZE
879 to ensure the frame offset remains suitably aligned.
880
881 So for requests which depended on the rounding of SIZE, we go ahead
882 and round it now. We also make sure ALIGNMENT is at least
883 BIGGEST_ALIGNMENT. */
884 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
885 p->slot = assign_stack_local_1 (mode,
886 size: (mode == BLKmode
887 ? aligned_upper_bound (value: size,
888 align: (int) align
889 / BITS_PER_UNIT)
890 : size),
891 align, kind: 0);
892
893 p->align = align;
894
895 /* The following slot size computation is necessary because we don't
896 know the actual size of the temporary slot until assign_stack_local
897 has performed all the frame alignment and size rounding for the
898 requested temporary. Note that extra space added for alignment
899 can be either above or below this stack slot depending on which
900 way the frame grows. We include the extra space if and only if it
901 is above this slot. */
902 if (FRAME_GROWS_DOWNWARD)
903 p->size = frame_offset_old - frame_offset;
904 else
905 p->size = size;
906
907 /* Now define the fields used by combine_temp_slots. */
908 if (FRAME_GROWS_DOWNWARD)
909 {
910 p->base_offset = frame_offset;
911 p->full_size = frame_offset_old - frame_offset;
912 }
913 else
914 {
915 p->base_offset = frame_offset_old;
916 p->full_size = frame_offset - frame_offset_old;
917 }
918
919 selected = p;
920 }
921
922 p = selected;
923 p->in_use = true;
924 p->type = type;
925 p->level = temp_slot_level;
926 n_temp_slots_in_use++;
927
928 pp = temp_slots_at_level (level: p->level);
929 insert_slot_to_list (temp: p, list: pp);
930 insert_temp_slot_address (XEXP (p->slot, 0), temp_slot: p);
931
932 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
933 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
934 vec_safe_push (stack_slot_list, obj: slot);
935
936 /* If we know the alias set for the memory that will be used, use
937 it. If there's no TYPE, then we don't know anything about the
938 alias set for the memory. */
939 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
940 set_mem_align (slot, align);
941
942 /* If a type is specified, set the relevant flags. */
943 if (type != 0)
944 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
945 MEM_NOTRAP_P (slot) = 1;
946
947 return slot;
948}
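
/* Illustrative sketch, not GCC code: the reuse policy of the loop above in
   miniature.  Pick the smallest adequate free slot; among equal sizes pick
   the one whose alignment is closest to the request; an exact match wins
   immediately.  Returns an index, or -1 if nothing fits.  */
struct example_slot
{
  long size;
  unsigned int align;
  bool in_use;
};

static int
example_pick_slot (const struct example_slot *slots, int n,
                   long size, unsigned int align)
{
  int best = -1;
  for (int i = 0; i < n; i++)
    {
      if (slots[i].in_use || slots[i].align < align || slots[i].size < size)
        continue;
      if (slots[i].align == align && slots[i].size == size)
        return i;                     /* exact fit */
      if (best < 0
          || (slots[best].size == slots[i].size
              ? slots[best].align > slots[i].align
              : slots[best].size >= slots[i].size))
        best = i;
    }
  return best;
}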
949
950/* Allocate a temporary stack slot and record it for possible later
951 reuse. First two arguments are same as in preceding function. */
952
953rtx
954assign_stack_temp (machine_mode mode, poly_int64 size)
955{
956 return assign_stack_temp_for_type (mode, size, NULL_TREE);
957}
958
959/* Assign a temporary.
960 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
961 and its name should be used in error messages. In either case, we
962 allocate a temporary of the given type.
963 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
964 it is 0 if a register is OK.
965 DONT_PROMOTE is 1 if we should not promote values in register
966 to wider modes. */
967
968rtx
969assign_temp (tree type_or_decl, int memory_required,
970 int dont_promote ATTRIBUTE_UNUSED)
971{
972 tree type, decl;
973 machine_mode mode;
974#ifdef PROMOTE_MODE
975 int unsignedp;
976#endif
977
978 if (DECL_P (type_or_decl))
979 decl = type_or_decl, type = TREE_TYPE (decl);
980 else
981 decl = NULL, type = type_or_decl;
982
983 mode = TYPE_MODE (type);
984#ifdef PROMOTE_MODE
985 unsignedp = TYPE_UNSIGNED (type);
986#endif
987
988 /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
989 end. See also create_tmp_var for the gimplification-time check. */
990 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
991
992 if (mode == BLKmode || memory_required)
993 {
994 poly_int64 size;
995 rtx tmp;
996
997 /* Unfortunately, we don't yet know how to allocate variable-sized
998 temporaries. However, sometimes we can find a fixed upper limit on
999 the size, so try that instead. */
1000 if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), value: &size))
1001 size = max_int_size_in_bytes (type);
1002
1003 /* Zero sized arrays are a GNU C extension. Set size to 1 to avoid
1004 problems with allocating the stack space. */
1005 if (known_eq (size, 0))
1006 size = 1;
1007
1008 /* The size of the temporary may be too large to fit into an integer. */
1009 /* ??? Not sure this should happen except for user silliness, so limit
1010 this to things that aren't compiler-generated temporaries. The
1011 rest of the time we'll die in assign_stack_temp_for_type. */
1012 if (decl
1013 && !known_size_p (a: size)
1014 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1015 {
1016 error ("size of variable %q+D is too large", decl);
1017 size = 1;
1018 }
1019
1020 tmp = assign_stack_temp_for_type (mode, size, type);
1021 return tmp;
1022 }
1023
1024#ifdef PROMOTE_MODE
1025 if (! dont_promote)
1026 mode = promote_mode (type, mode, &unsignedp);
1027#endif
1028
1029 return gen_reg_rtx (mode);
1030}
1031
1032/* Combine temporary stack slots which are adjacent on the stack.
1033
1034 This allows for better use of already allocated stack space. This is only
1035 done for BLKmode slots because we can be sure that we won't have alignment
1036 problems in this case. */
1037
1038static void
1039combine_temp_slots (void)
1040{
1041 class temp_slot *p, *q, *next, *next_q;
1042 int num_slots;
1043
1044 /* We can't combine slots, because the information about which slot
1045 is in which alias set will be lost. */
1046 if (flag_strict_aliasing)
1047 return;
1048
1049 /* If there are a lot of temp slots, don't do anything unless
1050 high levels of optimization are enabled. */
1051 if (! flag_expensive_optimizations)
1052 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1053 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1054 return;
1055
1056 for (p = avail_temp_slots; p; p = next)
1057 {
1058 int delete_p = 0;
1059
1060 next = p->next;
1061
1062 if (GET_MODE (p->slot) != BLKmode)
1063 continue;
1064
1065 for (q = p->next; q; q = next_q)
1066 {
1067 int delete_q = 0;
1068
1069 next_q = q->next;
1070
1071 if (GET_MODE (q->slot) != BLKmode)
1072 continue;
1073
1074 if (known_eq (p->base_offset + p->full_size, q->base_offset))
1075 {
1076 /* Q comes after P; combine Q into P. */
1077 p->size += q->size;
1078 p->full_size += q->full_size;
1079 delete_q = 1;
1080 }
1081 else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1082 {
1083 /* P comes after Q; combine P into Q. */
1084 q->size += p->size;
1085 q->full_size += p->full_size;
1086 delete_p = 1;
1087 break;
1088 }
1089 if (delete_q)
1090 cut_slot_from_list (temp: q, list: &avail_temp_slots);
1091 }
1092
1093 /* Either delete P or advance past it. */
1094 if (delete_p)
1095 cut_slot_from_list (temp: p, list: &avail_temp_slots);
1096 }
1097}
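
/* Illustrative sketch, not GCC code: the adjacency test used by
   combine_temp_slots above, reduced to plain offsets.  If A and B touch in
   the frame, merge B into A and return true.  */
struct example_free_slot
{
  long base_offset;
  long full_size;
};

static bool
example_try_merge (struct example_free_slot *a,
                   const struct example_free_slot *b)
{
  if (a->base_offset + a->full_size == b->base_offset)
    {
      a->full_size += b->full_size;     /* B immediately follows A */
      return true;
    }
  if (b->base_offset + b->full_size == a->base_offset)
    {
      a->base_offset = b->base_offset;  /* B immediately precedes A */
      a->full_size += b->full_size;
      return true;
    }
  return false;
}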
1098
1099/* Indicate that NEW_RTX is an alternate way of referring to the temp
1100 slot that previously was known by OLD_RTX. */
1101
1102void
1103update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1104{
1105 class temp_slot *p;
1106
1107 if (rtx_equal_p (old_rtx, new_rtx))
1108 return;
1109
1110 p = find_temp_slot_from_address (x: old_rtx);
1111
1112 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1113 NEW_RTX is a register, see if one operand of the PLUS is a
1114 temporary location; if so, NEW_RTX points into it. Otherwise,
1115 if both OLD_RTX and NEW_RTX are a PLUS and there is a register
1116 in common between them, try a recursive call on those
1117 values. */
1118 if (p == 0)
1119 {
1120 if (GET_CODE (old_rtx) != PLUS)
1121 return;
1122
1123 if (REG_P (new_rtx))
1124 {
1125 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1126 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1127 return;
1128 }
1129 else if (GET_CODE (new_rtx) != PLUS)
1130 return;
1131
1132 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1133 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1134 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1135 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1136 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1137 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1138 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1139 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1140
1141 return;
1142 }
1143
1144 /* Otherwise add an alias for the temp's address. */
1145 insert_temp_slot_address (address: new_rtx, temp_slot: p);
1146}
1147
1148/* If X could be a reference to a temporary slot, mark that slot as
1149 belonging to one level higher than the current level. If X
1150 matched one of our slots, just mark that one. Otherwise, we can't
1151 easily predict which it is, so upgrade all of them.
1152
1153 This is called when an ({...}) construct occurs and a statement
1154 returns a value in memory. */
1155
1156void
1157preserve_temp_slots (rtx x)
1158{
1159 class temp_slot *p = 0, *next;
1160
1161 if (x == 0)
1162 return;
1163
1164 /* If X is a register that is being used as a pointer, see if we have
1165 a temporary slot we know it points to. */
1166 if (REG_P (x) && REG_POINTER (x))
1167 p = find_temp_slot_from_address (x);
1168
1169 /* If X is not in memory or is at a constant address, it cannot be in
1170 a temporary slot. */
1171 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1172 return;
1173
1174 /* First see if we can find a match. */
1175 if (p == 0)
1176 p = find_temp_slot_from_address (XEXP (x, 0));
1177
1178 if (p != 0)
1179 {
1180 if (p->level == temp_slot_level)
1181 move_slot_to_level (temp: p, temp_slot_level - 1);
1182 return;
1183 }
1184
1185 /* Otherwise, preserve all non-kept slots at this level. */
1186 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1187 {
1188 next = p->next;
1189 move_slot_to_level (temp: p, temp_slot_level - 1);
1190 }
1191}
1192
1193/* Free all temporaries used so far. This is normally called at the
1194 end of generating code for a statement. */
1195
1196void
1197free_temp_slots (void)
1198{
1199 class temp_slot *p, *next;
1200 bool some_available = false;
1201
1202 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1203 {
1204 next = p->next;
1205 make_slot_available (temp: p);
1206 some_available = true;
1207 }
1208
1209 if (some_available)
1210 {
1211 remove_unused_temp_slot_addresses ();
1212 combine_temp_slots ();
1213 }
1214}
1215
1216/* Push deeper into the nesting level for stack temporaries. */
1217
1218void
1219push_temp_slots (void)
1220{
1221 temp_slot_level++;
1222}
1223
1224/* Pop a temporary nesting level. All slots in use in the current level
1225 are freed. */
1226
1227void
1228pop_temp_slots (void)
1229{
1230 free_temp_slots ();
1231 temp_slot_level--;
1232}
1233
1234/* Initialize temporary slots. */
1235
1236void
1237init_temp_slots (void)
1238{
1239 /* We have not allocated any temporaries yet. */
1240 avail_temp_slots = 0;
1241 vec_alloc (used_temp_slots, nelems: 0);
1242 temp_slot_level = 0;
1243 n_temp_slots_in_use = 0;
1244
1245 /* Set up the table to map addresses to temp slots. */
1246 if (! temp_slot_address_table)
1247 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (n: 32);
1248 else
1249 temp_slot_address_table->empty ();
1250}
1251
1252/* Functions and data structures to keep track of the values hard regs
1253 had at the start of the function. */
1254
1255/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1256 and has_hard_reg_initial_val. */
1257struct GTY(()) initial_value_pair {
1258 rtx hard_reg;
1259 rtx pseudo;
1260};
1261/* ??? This could be a VEC but there is currently no way to define an
1262 opaque VEC type. This could be worked around by defining struct
1263 initial_value_pair in function.h. */
1264struct GTY(()) initial_value_struct {
1265 int num_entries;
1266 int max_entries;
1267 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1268};
1269
1270/* If a pseudo represents an initial hard reg (or expression), return
1271 it, else return NULL_RTX. */
1272
1273rtx
1274get_hard_reg_initial_reg (rtx reg)
1275{
1276 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1277 int i;
1278
1279 if (ivs == 0)
1280 return NULL_RTX;
1281
1282 for (i = 0; i < ivs->num_entries; i++)
1283 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1284 return ivs->entries[i].hard_reg;
1285
1286 return NULL_RTX;
1287}
1288
1289/* Make sure that there's a pseudo register of mode MODE that stores the
1290 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1291
1292rtx
1293get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1294{
1295 struct initial_value_struct *ivs;
1296 rtx rv;
1297
1298 rv = has_hard_reg_initial_val (mode, regno);
1299 if (rv)
1300 return rv;
1301
1302 ivs = crtl->hard_reg_initial_vals;
1303 if (ivs == 0)
1304 {
1305 ivs = ggc_alloc<initial_value_struct> ();
1306 ivs->num_entries = 0;
1307 ivs->max_entries = 5;
1308 ivs->entries = ggc_vec_alloc<initial_value_pair> (c: 5);
1309 crtl->hard_reg_initial_vals = ivs;
1310 }
1311
1312 if (ivs->num_entries >= ivs->max_entries)
1313 {
1314 ivs->max_entries += 5;
1315 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1316 ivs->max_entries);
1317 }
1318
1319 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1320 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1321
1322 return ivs->entries[ivs->num_entries++].pseudo;
1323}
1324
1325/* See if get_hard_reg_initial_val has been used to create a pseudo
1326 for the initial value of hard register REGNO in mode MODE. Return
1327 the associated pseudo if so, otherwise return NULL. */
1328
1329rtx
1330has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1331{
1332 struct initial_value_struct *ivs;
1333 int i;
1334
1335 ivs = crtl->hard_reg_initial_vals;
1336 if (ivs != 0)
1337 for (i = 0; i < ivs->num_entries; i++)
1338 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1339 && REGNO (ivs->entries[i].hard_reg) == regno)
1340 return ivs->entries[i].pseudo;
1341
1342 return NULL_RTX;
1343}
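
/* Illustrative sketch, not GCC code: the lookup-or-create pattern behind
   get_hard_reg_initial_val / has_hard_reg_initial_val.  One record is kept
   per hard register whose entry value is needed, and repeated requests
   return the same pseudo.  All names here are hypothetical.  */
struct example_initial_value
{
  int hard_regno;
  int pseudo_id;
};

static int
example_get_initial_val (struct example_initial_value *entries, int *num,
                         int hard_regno, int *next_pseudo_id)
{
  for (int i = 0; i < *num; i++)
    if (entries[i].hard_regno == hard_regno)
      return entries[i].pseudo_id;      /* already recorded; reuse it */
  int id = (*next_pseudo_id)++;         /* fresh pseudo */
  entries[*num].hard_regno = hard_regno;
  entries[*num].pseudo_id = id;
  (*num)++;
  return id;
}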
1344
1345void
1346emit_initial_value_sets (void)
1347{
1348 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1349 int i;
1350 rtx_insn *seq;
1351
1352 if (ivs == 0)
1353 return;
1354
1355 start_sequence ();
1356 for (i = 0; i < ivs->num_entries; i++)
1357 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1358 seq = get_insns ();
1359 end_sequence ();
1360
1361 emit_insn_at_entry (seq);
1362}
1363
1364/* Store the hard-reg/pseudo-reg initial value pair for entry I in *HREG
1365 and *PREG and return TRUE if I is a valid entry, FALSE otherwise. */
1366bool
1367initial_value_entry (int i, rtx *hreg, rtx *preg)
1368{
1369 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1370 if (!ivs || i >= ivs->num_entries)
1371 return false;
1372
1373 *hreg = ivs->entries[i].hard_reg;
1374 *preg = ivs->entries[i].pseudo;
1375 return true;
1376}
1377
1378/* These routines are responsible for converting virtual register references
1379 to the actual hard register references once RTL generation is complete.
1380
1381 The following five variables are used for communication between the
1382 routines. They contain the offsets of the virtual registers from their
1383 respective hard registers. */
1384
1385static poly_int64 in_arg_offset;
1386static poly_int64 var_offset;
1387static poly_int64 dynamic_offset;
1388static poly_int64 out_arg_offset;
1389static poly_int64 cfa_offset;
1390
1391/* In most machines, the stack pointer register is equivalent to the bottom
1392 of the stack. */
1393
1394#ifndef STACK_POINTER_OFFSET
1395#define STACK_POINTER_OFFSET 0
1396#endif
1397
1398#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1399#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1400#endif
1401
1402/* If not defined, pick an appropriate default for the offset of dynamically
1403 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1404 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1405
1406#ifndef STACK_DYNAMIC_OFFSET
1407
1408/* The bottom of the stack points to the actual arguments. If
1409 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1410 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1411 stack space for register parameters is not pushed by the caller, but
1412 rather part of the fixed stack areas and hence not included in
1413 `crtl->outgoing_args_size'. Nevertheless, we must allow
1414 for it when allocating stack dynamic objects. */
1415
1416#ifdef INCOMING_REG_PARM_STACK_SPACE
1417#define STACK_DYNAMIC_OFFSET(FNDECL) \
1418((ACCUMULATE_OUTGOING_ARGS \
1419 ? (crtl->outgoing_args_size \
1420 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1421 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1422 : 0) + (STACK_POINTER_OFFSET))
1423#else
1424#define STACK_DYNAMIC_OFFSET(FNDECL) \
1425 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1426 + (STACK_POINTER_OFFSET))
1427#endif
1428#endif
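
/* Illustrative sketch, not GCC code: the simpler definition of
   STACK_DYNAMIC_OFFSET above reduced to plain arithmetic.  All three
   parameters stand in for target-dependent values.  */
static long
example_stack_dynamic_offset (bool accumulate_outgoing_args,
                              long outgoing_args_size,
                              long stack_pointer_offset)
{
  return (accumulate_outgoing_args ? outgoing_args_size : 0)
         + stack_pointer_offset;
}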
1429
1430
1431/* Given a piece of RTX and a pointer to a poly_int64, if the RTX
1432 is a virtual register, return the equivalent hard register and set the
1433 offset indirectly through the pointer. Otherwise, return 0. */
1434
1435static rtx
1436instantiate_new_reg (rtx x, poly_int64 *poffset)
1437{
1438 rtx new_rtx;
1439 poly_int64 offset;
1440
1441 if (x == virtual_incoming_args_rtx)
1442 {
1443 if (stack_realign_drap)
1444 {
1445 /* Replace virtual_incoming_args_rtx with internal arg
1446 pointer if DRAP is used to realign stack. */
1447 new_rtx = crtl->args.internal_arg_pointer;
1448 offset = 0;
1449 }
1450 else
1451 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1452 }
1453 else if (x == virtual_stack_vars_rtx)
1454 new_rtx = frame_pointer_rtx, offset = var_offset;
1455 else if (x == virtual_stack_dynamic_rtx)
1456 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1457 else if (x == virtual_outgoing_args_rtx)
1458 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1459 else if (x == virtual_cfa_rtx)
1460 {
1461#ifdef FRAME_POINTER_CFA_OFFSET
1462 new_rtx = frame_pointer_rtx;
1463#else
1464 new_rtx = arg_pointer_rtx;
1465#endif
1466 offset = cfa_offset;
1467 }
1468 else if (x == virtual_preferred_stack_boundary_rtx)
1469 {
1470 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1471 offset = 0;
1472 }
1473 else
1474 return NULL_RTX;
1475
1476 *poffset = offset;
1477 return new_rtx;
1478}
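
/* Illustrative sketch, not GCC code: the substitution performed above in
   table form.  Every virtual register is rewritten as a fixed hard register
   plus a compile-time constant offset; the offsets play the role of
   in_arg_offset, var_offset, dynamic_offset and out_arg_offset.  */
enum example_virtual_reg
{
  EX_VIRT_INCOMING_ARGS,
  EX_VIRT_STACK_VARS,
  EX_VIRT_STACK_DYNAMIC,
  EX_VIRT_OUTGOING_ARGS
};

struct example_replacement
{
  const char *hard_reg;
  long offset;
};

static struct example_replacement
example_instantiate (enum example_virtual_reg v, long in_arg, long var,
                     long dyn, long out_arg)
{
  struct example_replacement r;
  switch (v)
    {
    case EX_VIRT_INCOMING_ARGS:
      r.hard_reg = "arg pointer";    r.offset = in_arg;   break;
    case EX_VIRT_STACK_VARS:
      r.hard_reg = "frame pointer";  r.offset = var;      break;
    case EX_VIRT_STACK_DYNAMIC:
      r.hard_reg = "stack pointer";  r.offset = dyn;      break;
    default:
      r.hard_reg = "stack pointer";  r.offset = out_arg;  break;
    }
  return r;
}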
1479
1480/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1481 registers present inside of *LOC. The expression is simplified,
1482 as much as possible, but is not to be considered "valid" in any sense
1483 implied by the target. Return true if any change is made. */
1484
1485static bool
1486instantiate_virtual_regs_in_rtx (rtx *loc)
1487{
1488 if (!*loc)
1489 return false;
1490 bool changed = false;
1491 subrtx_ptr_iterator::array_type array;
1492 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1493 {
1494 rtx *loc = *iter;
1495 if (rtx x = *loc)
1496 {
1497 rtx new_rtx;
1498 poly_int64 offset;
1499 switch (GET_CODE (x))
1500 {
1501 case REG:
1502 new_rtx = instantiate_new_reg (x, poffset: &offset);
1503 if (new_rtx)
1504 {
1505 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1506 changed = true;
1507 }
1508 iter.skip_subrtxes ();
1509 break;
1510
1511 case PLUS:
1512 new_rtx = instantiate_new_reg (XEXP (x, 0), poffset: &offset);
1513 if (new_rtx)
1514 {
1515 XEXP (x, 0) = new_rtx;
1516 *loc = plus_constant (GET_MODE (x), x, offset, true);
1517 changed = true;
1518 iter.skip_subrtxes ();
1519 break;
1520 }
1521
1522 /* FIXME -- from old code */
1523 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1524 we can commute the PLUS and SUBREG because pointers into the
1525 frame are well-behaved. */
1526 break;
1527
1528 default:
1529 break;
1530 }
1531 }
1532 }
1533 return changed;
1534}
1535
1536/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1537 matches the predicate for insn CODE operand OPERAND. */
1538
1539static bool
1540safe_insn_predicate (int code, int operand, rtx x)
1541{
1542 return code < 0 || insn_operand_matches (icode: (enum insn_code) code, opno: operand, operand: x);
1543}
1544
1545/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1546 registers present inside of insn. The result will be a valid insn. */
1547
1548static void
1549instantiate_virtual_regs_in_insn (rtx_insn *insn)
1550{
1551 poly_int64 offset;
1552 int insn_code, i;
1553 bool any_change = false;
1554 rtx set, new_rtx, x;
1555 rtx_insn *seq;
1556
1557 /* There are some special cases to be handled first. */
1558 set = single_set (insn);
1559 if (set)
1560 {
1561 /* We're allowed to assign to a virtual register. This is interpreted
1562 to mean that the underlying register gets assigned the inverse
1563 transformation. This is used, for example, in the handling of
1564 non-local gotos. */
1565 new_rtx = instantiate_new_reg (SET_DEST (set), poffset: &offset);
1566 if (new_rtx)
1567 {
1568 start_sequence ();
1569
1570 instantiate_virtual_regs_in_rtx (loc: &SET_SRC (set));
1571 x = simplify_gen_binary (code: PLUS, GET_MODE (new_rtx), SET_SRC (set),
1572 op1: gen_int_mode (-offset, GET_MODE (new_rtx)));
1573 x = force_operand (x, new_rtx);
1574 if (x != new_rtx)
1575 emit_move_insn (new_rtx, x);
1576
1577 seq = get_insns ();
1578 end_sequence ();
1579
1580 emit_insn_before (seq, insn);
1581 delete_insn (insn);
1582 return;
1583 }
1584
1585 /* Handle a straight copy from a virtual register by generating a
1586 new add insn. The difference between this and falling through
1587 to the generic case is avoiding a new pseudo and eliminating a
1588 move insn in the initial rtl stream. */
1589 new_rtx = instantiate_new_reg (SET_SRC (set), poffset: &offset);
1590 if (new_rtx
1591 && maybe_ne (a: offset, b: 0)
1592 && REG_P (SET_DEST (set))
1593 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1594 {
1595 start_sequence ();
1596
1597 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1598 gen_int_mode (offset,
1599 GET_MODE (SET_DEST (set))),
1600 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1601 if (x != SET_DEST (set))
1602 emit_move_insn (SET_DEST (set), x);
1603
1604 seq = get_insns ();
1605 end_sequence ();
1606
1607 emit_insn_before (seq, insn);
1608 delete_insn (insn);
1609 return;
1610 }
1611
1612 extract_insn (insn);
1613 insn_code = INSN_CODE (insn);
1614
1615 /* Handle a plus involving a virtual register by determining if the
1616 operands remain valid if they're modified in place. */
1617 poly_int64 delta;
1618 if (GET_CODE (SET_SRC (set)) == PLUS
1619 && recog_data.n_operands >= 3
1620 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1621 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1622 && poly_int_rtx_p (x: recog_data.operand[2], res: &delta)
1623 && (new_rtx = instantiate_new_reg (x: recog_data.operand[1], poffset: &offset)))
1624 {
1625 offset += delta;
1626
1627 /* If the sum is zero, then replace with a plain move. */
1628 if (known_eq (offset, 0)
1629 && REG_P (SET_DEST (set))
1630 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1631 {
1632 start_sequence ();
1633 emit_move_insn (SET_DEST (set), new_rtx);
1634 seq = get_insns ();
1635 end_sequence ();
1636
1637 emit_insn_before (seq, insn);
1638 delete_insn (insn);
1639 return;
1640 }
1641
1642 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1643
1644 /* Using validate_change and apply_change_group here leaves
1645 recog_data in an invalid state. Since we know exactly what
1646 we want to check, do those two by hand. */
1647 if (safe_insn_predicate (code: insn_code, operand: 1, x: new_rtx)
1648 && safe_insn_predicate (code: insn_code, operand: 2, x))
1649 {
1650 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1651 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1652 any_change = true;
1653
1654 /* Fall through into the regular operand fixup loop in
1655 order to take care of operands other than 1 and 2. */
1656 }
1657 }
1658 }
1659 else
1660 {
1661 extract_insn (insn);
1662 insn_code = INSN_CODE (insn);
1663 }
1664
1665 /* In the general case, we expect virtual registers to appear only in
1666 operands, and then only as either bare registers or inside memories. */
1667 for (i = 0; i < recog_data.n_operands; ++i)
1668 {
1669 x = recog_data.operand[i];
1670 switch (GET_CODE (x))
1671 {
1672 case MEM:
1673 {
1674 rtx addr = XEXP (x, 0);
1675
1676 if (!instantiate_virtual_regs_in_rtx (loc: &addr))
1677 continue;
1678
1679 start_sequence ();
1680 x = replace_equiv_address (x, addr, true);
1681 /* It may happen that the address with the virtual reg
1682 was valid (e.g. based on the virtual stack reg, which might
1683 be acceptable to the predicates with all offsets), whereas
1684 the new address isn't valid anymore, for instance when the address
1685 still carries an offset but its base reg is no longer the virtual
1686 stack reg. Below we would do a force_reg on the whole operand,
1687 but this insn might actually only accept memory. Hence,
1688 before doing that last resort, try to reload the address into
1689 a register, so this operand stays a MEM. */
1690 if (!safe_insn_predicate (code: insn_code, operand: i, x))
1691 {
1692 addr = force_reg (GET_MODE (addr), addr);
1693 x = replace_equiv_address (x, addr, true);
1694 }
1695 seq = get_insns ();
1696 end_sequence ();
1697 if (seq)
1698 emit_insn_before (seq, insn);
1699 }
1700 break;
1701
1702 case REG:
1703 new_rtx = instantiate_new_reg (x, poffset: &offset);
1704 if (new_rtx == NULL)
1705 continue;
1706 if (known_eq (offset, 0))
1707 x = new_rtx;
1708 else
1709 {
1710 start_sequence ();
1711
1712 /* Careful, special mode predicates may have stuff in
1713 insn_data[insn_code].operand[i].mode that isn't useful
1714 to us for computing a new value. */
1715 /* ??? Recognize address_operand and/or "p" constraints
1716 to see if (plus new offset) is valid before we put
1717 this through expand_simple_binop. */
1718 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1719 gen_int_mode (offset, GET_MODE (x)),
1720 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1721 seq = get_insns ();
1722 end_sequence ();
1723 emit_insn_before (seq, insn);
1724 }
1725 break;
1726
1727 case SUBREG:
1728 new_rtx = instantiate_new_reg (SUBREG_REG (x), poffset: &offset);
1729 if (new_rtx == NULL)
1730 continue;
1731 if (maybe_ne (a: offset, b: 0))
1732 {
1733 start_sequence ();
1734 new_rtx = expand_simple_binop
1735 (GET_MODE (new_rtx), PLUS, new_rtx,
1736 gen_int_mode (offset, GET_MODE (new_rtx)),
1737 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1738 seq = get_insns ();
1739 end_sequence ();
1740 emit_insn_before (seq, insn);
1741 }
1742 x = simplify_gen_subreg (outermode: recog_data.operand_mode[i], op: new_rtx,
1743 GET_MODE (new_rtx), SUBREG_BYTE (x));
1744 gcc_assert (x);
1745 break;
1746
1747 default:
1748 continue;
1749 }
1750
1751 /* At this point, X contains the new value for the operand.
1752 Validate the new value vs the insn predicate. Note that
1753 asm insns will have insn_code -1 here. */
1754 if (!safe_insn_predicate (code: insn_code, operand: i, x))
1755 {
1756 start_sequence ();
1757 if (REG_P (x))
1758 {
1759 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1760 x = copy_to_reg (x);
1761 }
1762 else
1763 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1764 seq = get_insns ();
1765 end_sequence ();
1766 if (seq)
1767 emit_insn_before (seq, insn);
1768 }
1769
1770 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1771 any_change = true;
1772 }
1773
1774 if (any_change)
1775 {
1776 /* Propagate operand changes into the duplicates. */
1777 for (i = 0; i < recog_data.n_dups; ++i)
1778 *recog_data.dup_loc[i]
1779 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1780
1781 /* Force re-recognition of the instruction for validation. */
1782 INSN_CODE (insn) = -1;
1783 }
1784
1785 if (asm_noperands (PATTERN (insn)) >= 0)
1786 {
1787 if (!check_asm_operands (PATTERN (insn)))
1788 {
1789 error_for_asm (insn, "impossible constraint in %<asm%>");
1790 /* For asm goto, instead of fixing up all the edges
1791 just clear the template and clear input and output operands
1792 and strip away clobbers. */
1793 if (JUMP_P (insn))
1794 {
1795 rtx asm_op = extract_asm_operands (PATTERN (insn));
1796 PATTERN (insn) = asm_op;
1797 PUT_MODE (asm_op, VOIDmode);
1798 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1799 ASM_OPERANDS_OUTPUT_CONSTRAINT (asm_op) = "";
1800 ASM_OPERANDS_OUTPUT_IDX (asm_op) = 0;
1801 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1802 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1803 }
1804 else
1805 delete_insn (insn);
1806 }
1807 }
1808 else
1809 {
1810 if (recog_memoized (insn) < 0)
1811 fatal_insn_not_found (insn);
1812 }
1813}
1814
1815/* Subroutine of instantiate_decls. Given RTL representing a decl,
1816 do any instantiation required. */
1817
1818void
1819instantiate_decl_rtl (rtx x)
1820{
1821 rtx addr;
1822
1823 if (x == 0)
1824 return;
1825
1826 /* If this is a CONCAT, recurse for the pieces. */
1827 if (GET_CODE (x) == CONCAT)
1828 {
1829 instantiate_decl_rtl (XEXP (x, 0));
1830 instantiate_decl_rtl (XEXP (x, 1));
1831 return;
1832 }
1833
1834 /* If this is not a MEM, no need to do anything. Similarly if the
1835 address is a constant or a register that is not a virtual register. */
1836 if (!MEM_P (x))
1837 return;
1838
1839 addr = XEXP (x, 0);
1840 if (CONSTANT_P (addr)
1841 || (REG_P (addr)
1842 && !VIRTUAL_REGISTER_P (addr)))
1843 return;
1844
1845 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1846}
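/* Added illustration (not part of the original sources): for a stack local
   whose DECL_RTL looks like (mem (plus (reg virtual-stack-vars) (const_int 8))),
   the call above rewrites the address in place so that it is based on the
   frame pointer plus the frame offset computed in instantiate_virtual_regs,
   keeping the decl's debug information consistent with the instantiated
   insns.  */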
1847
1848/* Helper for instantiate_decls called via walk_tree: Process all decls
1849 in the given DECL_VALUE_EXPR. */
1850
1851static tree
1852instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1853{
1854 tree t = *tp;
1855 if (! EXPR_P (t))
1856 {
1857 *walk_subtrees = 0;
1858 if (DECL_P (t))
1859 {
1860 if (DECL_RTL_SET_P (t))
1861 instantiate_decl_rtl (DECL_RTL (t));
1862 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1863 && DECL_INCOMING_RTL (t))
1864 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1865 if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1866 && DECL_HAS_VALUE_EXPR_P (t))
1867 {
1868 tree v = DECL_VALUE_EXPR (t);
1869 walk_tree (&v, instantiate_expr, NULL, NULL);
1870 }
1871 }
1872 }
1873 return NULL;
1874}
1875
1876/* Subroutine of instantiate_decls: Process all decls in the given
1877 BLOCK node and all its subblocks. */
1878
1879static void
1880instantiate_decls_1 (tree let)
1881{
1882 tree t;
1883
1884 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1885 {
1886 if (DECL_RTL_SET_P (t))
1887 instantiate_decl_rtl (DECL_RTL (t));
1888 if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1889 {
1890 tree v = DECL_VALUE_EXPR (t);
1891 walk_tree (&v, instantiate_expr, NULL, NULL);
1892 }
1893 }
1894
1895 /* Process all subblocks. */
1896 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1897 instantiate_decls_1 (t);
1898}
1899
1900/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1901 all virtual registers in their DECL_RTL's. */
1902
1903static void
1904instantiate_decls (tree fndecl)
1905{
1906 tree decl;
1907 unsigned ix;
1908
1909 /* Process all parameters of the function. */
1910 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1911 {
1912 instantiate_decl_rtl (DECL_RTL (decl));
1913 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1914 if (DECL_HAS_VALUE_EXPR_P (decl))
1915 {
1916 tree v = DECL_VALUE_EXPR (decl);
1917 walk_tree (&v, instantiate_expr, NULL, NULL);
1918 }
1919 }
1920
1921 if ((decl = DECL_RESULT (fndecl))
1922 && TREE_CODE (decl) == RESULT_DECL)
1923 {
1924 if (DECL_RTL_SET_P (decl))
1925 instantiate_decl_rtl (DECL_RTL (decl));
1926 if (DECL_HAS_VALUE_EXPR_P (decl))
1927 {
1928 tree v = DECL_VALUE_EXPR (decl);
1929 walk_tree (&v, instantiate_expr, NULL, NULL);
1930 }
1931 }
1932
1933 /* Process the saved static chain if it exists. */
1934 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1935 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1936 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1937
1938 /* Now process all variables defined in the function or its subblocks. */
1939 if (DECL_INITIAL (fndecl))
1940 instantiate_decls_1 (DECL_INITIAL (fndecl));
1941
1942 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1943 if (DECL_RTL_SET_P (decl))
1944 instantiate_decl_rtl (DECL_RTL (decl));
1945 vec_free (cfun->local_decls);
1946}
1947
1948/* Return the value of STACK_DYNAMIC_OFFSET for the current function.
1949 This is done through a function wrapper so that the macro sees a
1950 predictable set of included files. */
1951
1952poly_int64
1953get_stack_dynamic_offset ()
1954{
1955 return STACK_DYNAMIC_OFFSET (current_function_decl);
1956}
1957
1958/* Pass through the INSNS of function FNDECL and convert virtual register
1959 references to hard register references. */
1960
1961static void
1962instantiate_virtual_regs (void)
1963{
1964 rtx_insn *insn;
1965
1966 /* Compute the offsets to use for this function. */
1967 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1968 var_offset = targetm.starting_frame_offset ();
1969 dynamic_offset = get_stack_dynamic_offset ();
1970 out_arg_offset = STACK_POINTER_OFFSET;
1971#ifdef FRAME_POINTER_CFA_OFFSET
1972 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1973#else
1974 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1975#endif
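/* Added summary: roughly, these offsets pair up with the virtual registers
   as follows -- virtual-incoming-args is replaced by the arg pointer plus
   in_arg_offset, virtual-stack-vars by the frame pointer plus var_offset,
   virtual-stack-dynamic by the stack pointer plus dynamic_offset,
   virtual-outgoing-args by the stack pointer plus out_arg_offset, and
   virtual-cfa by the arg or frame pointer plus cfa_offset.  The actual
   substitution is done by instantiate_new_reg.  */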
1976
1977 /* Initialize recognition, indicating that volatile is OK. */
1978 init_recog ();
1979
1980 /* Scan through all the insns, instantiating every virtual register still
1981 present. */
1982 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1983 if (INSN_P (insn))
1984 {
1985 /* These patterns in the instruction stream can never be recognized.
1986 Fortunately, they shouldn't contain virtual registers either. */
1987 if (GET_CODE (PATTERN (insn)) == USE
1988 || GET_CODE (PATTERN (insn)) == CLOBBER
1989 || GET_CODE (PATTERN (insn)) == ASM_INPUT
1990 || DEBUG_MARKER_INSN_P (insn))
1991 continue;
1992 else if (DEBUG_BIND_INSN_P (insn))
1993 instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1994 else
1995 instantiate_virtual_regs_in_insn (insn);
1996
1997 if (insn->deleted ())
1998 continue;
1999
2000 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
2001
2002 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
2003 if (CALL_P (insn))
2004 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
2005 }
2006
2007 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
2008 instantiate_decls (current_function_decl);
2009
2010 targetm.instantiate_decls ();
2011
2012 /* Indicate that, from now on, assign_stack_local should use
2013 frame_pointer_rtx. */
2014 virtuals_instantiated = 1;
2015}
2016
2017namespace {
2018
2019const pass_data pass_data_instantiate_virtual_regs =
2020{
2021 RTL_PASS, /* type */
2022 "vregs", /* name */
2023 OPTGROUP_NONE, /* optinfo_flags */
2024 TV_NONE, /* tv_id */
2025 0, /* properties_required */
2026 0, /* properties_provided */
2027 0, /* properties_destroyed */
2028 0, /* todo_flags_start */
2029 0, /* todo_flags_finish */
2030};
2031
2032class pass_instantiate_virtual_regs : public rtl_opt_pass
2033{
2034public:
2035 pass_instantiate_virtual_regs (gcc::context *ctxt)
2036 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2037 {}
2038
2039 /* opt_pass methods: */
2040 unsigned int execute (function *) final override
2041 {
2042 instantiate_virtual_regs ();
2043 return 0;
2044 }
2045
2046}; // class pass_instantiate_virtual_regs
2047
2048} // anon namespace
2049
2050rtl_opt_pass *
2051make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2052{
2053 return new pass_instantiate_virtual_regs (ctxt);
2054}
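/* Added note: the "vregs" name above is what appears in the RTL dump
   produced by -fdump-rtl-vregs; the pass itself is wired into the pass
   pipeline from passes.def through this constructor function.  */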
2055
2056
2057/* Return true if EXP is an aggregate type (or a value with aggregate type).
2058 This means a type for which function calls must pass an address to the
2059 function or get an address back from the function.
2060 EXP may be a type node or an expression (whose type is tested). */
2061
2062bool
2063aggregate_value_p (const_tree exp, const_tree fntype)
2064{
2065 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2066 int i, regno, nregs;
2067 rtx reg;
2068
2069 if (fntype)
2070 switch (TREE_CODE (fntype))
2071 {
2072 case CALL_EXPR:
2073 {
2074 tree fndecl = get_callee_fndecl (fntype);
2075 if (fndecl)
2076 fntype = TREE_TYPE (fndecl);
2077 else if (CALL_EXPR_FN (fntype))
2078 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2079 else
2080 /* For internal functions, assume nothing needs to be
2081 returned in memory. */
2082 return false;
2083 }
2084 break;
2085 case FUNCTION_DECL:
2086 fntype = TREE_TYPE (fntype);
2087 break;
2088 case FUNCTION_TYPE:
2089 case METHOD_TYPE:
2090 break;
2091 case IDENTIFIER_NODE:
2092 fntype = NULL_TREE;
2093 break;
2094 default:
2095 /* We don't expect other tree types here. */
2096 gcc_unreachable ();
2097 }
2098
2099 if (VOID_TYPE_P (type))
2100 return false;
2101
2102 if (error_operand_p (fntype))
2103 return false;
2104
2105 /* If a record should be passed the same as its first (and only) member
2106 don't pass it as an aggregate. */
2107 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2108 return aggregate_value_p (first_field (type), fntype);
2109
2110 /* If the front end has decided that this needs to be passed by
2111 reference, do so. */
2112 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2113 && DECL_BY_REFERENCE (exp))
2114 return true;
2115
2116 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2117 if (fntype && TREE_ADDRESSABLE (fntype))
2118 return true;
2119
2120 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2121 and thus can't be returned in registers. */
2122 if (TREE_ADDRESSABLE (type))
2123 return true;
2124
2125 if (TYPE_EMPTY_P (type))
2126 return false;
2127
2128 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2129 return true;
2130
2131 if (targetm.calls.return_in_memory (type, fntype))
2132 return true;
2133
2134 /* Make sure we have suitable call-clobbered regs to return
2135 the value in; if not, we must return it in memory. */
2136 reg = hard_function_value (type, 0, fntype, 0);
2137
2138 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2139 it is OK. */
2140 if (!REG_P (reg))
2141 return false;
2142
2143 /* Use the default ABI if the type of the function isn't known.
2144 The scheme for handling interoperability between different ABIs
2145 requires us to be able to tell when we're calling a function with
2146 a nondefault ABI. */
2147 const predefined_function_abi &abi = (fntype
2148 ? fntype_abi (fntype)
2149 : default_function_abi);
2150 regno = REGNO (reg);
2151 nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2152 for (i = 0; i < nregs; i++)
2153 if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
2154 return true;
2155
2156 return false;
2157}
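/* Added example: on typical targets a small struct such as
   struct { int x; } comes back in a register, so this returns false,
   while a type that is TREE_ADDRESSABLE (e.g. a C++ class with a
   nontrivial destructor, under most ABIs) or that the target's
   return_in_memory hook rejects is returned through a hidden pointer,
   so this returns true.  */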
2158
2159/* Return true if we should assign DECL a pseudo register; false if it
2160 should live on the local stack. */
2161
2162bool
2163use_register_for_decl (const_tree decl)
2164{
2165 if (TREE_CODE (decl) == SSA_NAME)
2166 {
2167 /* We often try to use the SSA_NAME, instead of its underlying
2168 decl, to get type information and guide decisions, to avoid
2169 differences of behavior between anonymous and named
2170 variables, but in this one case we have to go for the actual
2171 variable if there is one. The main reason is that, at least
2172 at -O0, we want to place user variables on the stack, but we
2173 don't mind using pseudos for anonymous or ignored temps.
2174 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2175 should go in pseudos, whereas their corresponding variables
2176 might have to go on the stack. So, disregarding the decl
2177 here would negatively impact debug info at -O0, enable
2178 coalescing between SSA_NAMEs that ought to get different
2179 stack/pseudo assignments, and get the incoming argument
2180 processing thoroughly confused by PARM_DECLs expected to live
2181 in stack slots but assigned to pseudos. */
2182 if (!SSA_NAME_VAR (decl))
2183 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2184 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2185
2186 decl = SSA_NAME_VAR (decl);
2187 }
2188
2189 /* Honor volatile. */
2190 if (TREE_SIDE_EFFECTS (decl))
2191 return false;
2192
2193 /* Honor addressability. */
2194 if (TREE_ADDRESSABLE (decl))
2195 return false;
2196
2197 /* RESULT_DECLs are a bit special in that they're assigned without
2198 regard to use_register_for_decl, but we generally only store in
2199 them. If we coalesce their SSA NAMEs, we'd better return a
2200 result that matches the assignment in expand_function_start. */
2201 if (TREE_CODE (decl) == RESULT_DECL)
2202 {
2203 /* If it's not an aggregate, we're going to use a REG or a
2204 PARALLEL containing a REG. */
2205 if (!aggregate_value_p (decl, current_function_decl))
2206 return true;
2207
2208 /* If expand_function_start determines the return value, we'll
2209 use MEM if it's not by reference. */
2210 if (cfun->returns_pcc_struct
2211 || (targetm.calls.struct_value_rtx
2212 (TREE_TYPE (current_function_decl), 1)))
2213 return DECL_BY_REFERENCE (decl);
2214
2215 /* Otherwise, we're taking an extra all.function_result_decl
2216 argument. It's set up in assign_parms_augmented_arg_list,
2217 under the (negated) conditions above, and then it's used to
2218 set up the RESULT_DECL rtl in assign_parms, after looping
2219 over all parameters. Now, if the RESULT_DECL is not by
2220 reference, we'll use a MEM either way. */
2221 if (!DECL_BY_REFERENCE (decl))
2222 return false;
2223
2224 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2225 the function_result_decl's assignment. Since it's a pointer,
2226 we can short-circuit a number of the tests below, and we must
2227 duplicate them because we don't have the function_result_decl
2228 to test. */
2229 if (!targetm.calls.allocate_stack_slots_for_args ())
2230 return true;
2231 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2232 if (optimize)
2233 return true;
2234 if (cfun->tail_call_marked)
2235 return true;
2236 /* We don't set DECL_REGISTER for the function_result_decl. */
2237 return false;
2238 }
2239
2240 /* Only register-like things go in registers. */
2241 if (DECL_MODE (decl) == BLKmode)
2242 return false;
2243
2244 /* If -ffloat-store specified, don't put explicit float variables
2245 into registers. */
2246 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2247 propagates values across these stores, and it probably shouldn't. */
2248 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2249 return false;
2250
2251 if (!targetm.calls.allocate_stack_slots_for_args ())
2252 return true;
2253
2254 /* If we're not interested in tracking debugging information for
2255 this decl, then we can certainly put it in a register. */
2256 if (DECL_IGNORED_P (decl))
2257 return true;
2258
2259 if (optimize)
2260 return true;
2261
2262 /* Thunks force a tail call even at -O0 so we need to avoid creating a
2263 dangling reference in case the parameter is passed by reference. */
2264 if (TREE_CODE (decl) == PARM_DECL && cfun->tail_call_marked)
2265 return true;
2266
2267 if (!DECL_REGISTER (decl))
2268 return false;
2269
2270 /* When not optimizing, disregard register keyword for types that
2271 could have methods, otherwise the methods won't be callable from
2272 the debugger. */
2273 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2274 return false;
2275
2276 return true;
2277}
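/* Added summary of the checks above: at -O0 a named user variable without
   the "register" keyword fails the optimize/DECL_IGNORED_P/DECL_REGISTER
   tests and is therefore placed on the stack, whereas a compiler-generated
   temporary with DECL_IGNORED_P set is given a pseudo.  Volatile,
   addressable and BLKmode decls never get a pseudo.  */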
2278
2279/* Structures to communicate between the subroutines of assign_parms.
2280 The first holds data persistent across all parameters, the second
2281 is cleared out for each parameter. */
2282
2283struct assign_parm_data_all
2284{
2285 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2286 should become a job of the target or otherwise encapsulated. */
2287 CUMULATIVE_ARGS args_so_far_v;
2288 cumulative_args_t args_so_far;
2289 struct args_size stack_args_size;
2290 tree function_result_decl;
2291 tree orig_fnargs;
2292 rtx_insn *first_conversion_insn;
2293 rtx_insn *last_conversion_insn;
2294 HOST_WIDE_INT pretend_args_size;
2295 HOST_WIDE_INT extra_pretend_bytes;
2296 int reg_parm_stack_space;
2297};
2298
2299struct assign_parm_data_one
2300{
2301 tree nominal_type;
2302 function_arg_info arg;
2303 rtx entry_parm;
2304 rtx stack_parm;
2305 machine_mode nominal_mode;
2306 machine_mode passed_mode;
2307 struct locate_and_pad_arg_data locate;
2308 int partial;
2309};
2310
2311/* A subroutine of assign_parms. Initialize ALL. */
2312
2313static void
2314assign_parms_initialize_all (struct assign_parm_data_all *all)
2315{
2316 tree fntype ATTRIBUTE_UNUSED;
2317
2318 memset (all, 0, sizeof (*all));
2319
2320 fntype = TREE_TYPE (current_function_decl);
2321
2322#ifdef INIT_CUMULATIVE_INCOMING_ARGS
2323 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2324#else
2325 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2326 current_function_decl, -1);
2327#endif
2328 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2329
2330#ifdef INCOMING_REG_PARM_STACK_SPACE
2331 all->reg_parm_stack_space
2332 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2333#endif
2334}
2335
2336 /* If ARGS contains entries with complex types, split each such entry
2337 into two entries of the component type, modifying the ARGS vector
2338 in place. */
2339
2340static void
2341split_complex_args (vec<tree> *args)
2342{
2343 unsigned i;
2344 tree p;
2345
2346 FOR_EACH_VEC_ELT (*args, i, p)
2347 {
2348 tree type = TREE_TYPE (p);
2349 if (TREE_CODE (type) == COMPLEX_TYPE
2350 && targetm.calls.split_complex_arg (type))
2351 {
2352 tree decl;
2353 tree subtype = TREE_TYPE (type);
2354 bool addressable = TREE_ADDRESSABLE (p);
2355
2356 /* Rewrite the PARM_DECL's type with its component. */
2357 p = copy_node (p);
2358 TREE_TYPE (p) = subtype;
2359 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2360 SET_DECL_MODE (p, VOIDmode);
2361 DECL_SIZE (p) = NULL;
2362 DECL_SIZE_UNIT (p) = NULL;
2363 /* If this arg must go in memory, put it in a pseudo here.
2364 We can't allow it to go in memory as per normal parms,
2365 because the usual place might not have the imag part
2366 adjacent to the real part. */
2367 DECL_ARTIFICIAL (p) = addressable;
2368 DECL_IGNORED_P (p) = addressable;
2369 TREE_ADDRESSABLE (p) = 0;
2370 layout_decl (p, 0);
2371 (*args)[i] = p;
2372
2373 /* Build a second synthetic decl. */
2374 decl = build_decl (EXPR_LOCATION (p),
2375 PARM_DECL, NULL_TREE, subtype);
2376 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2377 DECL_ARTIFICIAL (decl) = addressable;
2378 DECL_IGNORED_P (decl) = addressable;
2379 layout_decl (decl, 0);
2380 args->safe_insert (++i, decl);
2381 }
2382 }
2383}
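/* Added example: when the target's split_complex_arg hook accepts the type,
   a parameter declared as "_Complex double z" is replaced in ARGS by a copy
   of its decl narrowed to "double" for the real part, followed by a
   synthetic double PARM_DECL for the imaginary part.  */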
2384
2385/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2386 the hidden struct return argument, and (abi willing) complex args.
2387 Return the new parameter list. */
2388
2389static vec<tree>
2390assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2391{
2392 tree fndecl = current_function_decl;
2393 tree fntype = TREE_TYPE (fndecl);
2394 vec<tree> fnargs = vNULL;
2395 tree arg;
2396
2397 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2398 fnargs.safe_push (arg);
2399
2400 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2401
2402 /* If struct value address is treated as the first argument, make it so. */
2403 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2404 && ! cfun->returns_pcc_struct
2405 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2406 {
2407 tree type = build_pointer_type (TREE_TYPE (fntype));
2408 tree decl;
2409
2410 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2411 PARM_DECL, get_identifier (".result_ptr"), type);
2412 DECL_ARG_TYPE (decl) = type;
2413 DECL_ARTIFICIAL (decl) = 1;
2414 DECL_NAMELESS (decl) = 1;
2415 TREE_CONSTANT (decl) = 1;
2416 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2417 changes, the end of the RESULT_DECL handling block in
2418 use_register_for_decl must be adjusted to match. */
2419
2420 DECL_CHAIN (decl) = all->orig_fnargs;
2421 all->orig_fnargs = decl;
2422 fnargs.safe_insert (0, decl);
2423
2424 all->function_result_decl = decl;
2425 }
2426
2427 /* If the target wants to split complex arguments into scalars, do so. */
2428 if (targetm.calls.split_complex_arg)
2429 split_complex_args (&fnargs);
2430
2431 return fnargs;
2432}
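/* Added example: for "struct big f (int x)" returning its value in memory
   on a target without a struct_value_rtx register, the vector built above
   is effectively { .result_ptr, x }, i.e. the synthetic pointer parameter
   is prepended ahead of the user-visible arguments; complex arguments may
   additionally have been split by split_complex_args.  */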
2433
2434/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2435 data for the parameter. Incorporate ABI specifics such as pass-by-
2436 reference and type promotion. */
2437
2438static void
2439assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2440 struct assign_parm_data_one *data)
2441{
2442 int unsignedp;
2443
2444 *data = assign_parm_data_one ();
2445
2446 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2447 if (!cfun->stdarg)
2448 data->arg.named = 1; /* No variadic parms. */
2449 else if (DECL_CHAIN (parm))
2450 data->arg.named = 1; /* Not the last non-variadic parm. */
2451 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2452 data->arg.named = 1; /* Only variadic ones are unnamed. */
2453 else
2454 data->arg.named = 0; /* Treat as variadic. */
2455
2456 data->nominal_type = TREE_TYPE (parm);
2457 data->arg.type = DECL_ARG_TYPE (parm);
2458
2459 /* Look out for errors propagating this far. Also, if the parameter's
2460 type is void then its value doesn't matter. */
2461 if (TREE_TYPE (parm) == error_mark_node
2462 /* This can happen after weird syntax errors
2463 or if an enum type is defined among the parms. */
2464 || TREE_CODE (parm) != PARM_DECL
2465 || data->arg.type == NULL
2466 || VOID_TYPE_P (data->nominal_type))
2467 {
2468 data->nominal_type = data->arg.type = void_type_node;
2469 data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
2470 return;
2471 }
2472
2473 /* Find mode of arg as it is passed, and mode of arg as it should be
2474 during execution of this function. */
2475 data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
2476 data->nominal_mode = TYPE_MODE (data->nominal_type);
2477
2478 /* If the parm is to be passed as a transparent union or record, use the
2479 type of the first field for the tests below. We have already verified
2480 that the modes are the same. */
2481 if (RECORD_OR_UNION_TYPE_P (data->arg.type)
2482 && TYPE_TRANSPARENT_AGGR (data->arg.type))
2483 data->arg.type = TREE_TYPE (first_field (data->arg.type));
2484
2485 /* See if this arg was passed by invisible reference. */
2486 if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
2487 {
2488 data->nominal_type = data->arg.type;
2489 data->passed_mode = data->nominal_mode = data->arg.mode;
2490 }
2491
2492 /* Find mode as it is passed by the ABI. */
2493 unsignedp = TYPE_UNSIGNED (data->arg.type);
2494 data->arg.mode
2495 = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
2496 TREE_TYPE (current_function_decl), 0);
2497}
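/* Added, target-dependent example: on a target whose promote_function_mode
   hook widens sub-word integers, a "short" parameter typically ends up with
   nominal_mode and passed_mode HImode but data->arg.mode SImode, so the
   incoming value arrives in a full integer register even though the
   variable itself is HImode.  */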
2498
2499/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2500
2501static void
2502assign_parms_setup_varargs (struct assign_parm_data_all *all,
2503 struct assign_parm_data_one *data, bool no_rtl)
2504{
2505 int varargs_pretend_bytes = 0;
2506
2507 function_arg_info last_named_arg = data->arg;
2508 last_named_arg.named = true;
2509 targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
2510 &varargs_pretend_bytes, no_rtl);
2511
2512 /* If the back-end has requested extra stack space, record how much is
2513 needed. Do not change pretend_args_size otherwise since it may be
2514 nonzero from an earlier partial argument. */
2515 if (varargs_pretend_bytes > 0)
2516 all->pretend_args_size = varargs_pretend_bytes;
2517}
2518
2519/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2520 the incoming location of the current parameter. */
2521
2522static void
2523assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2524 struct assign_parm_data_one *data)
2525{
2526 HOST_WIDE_INT pretend_bytes = 0;
2527 rtx entry_parm;
2528 bool in_regs;
2529
2530 if (data->arg.mode == VOIDmode)
2531 {
2532 data->entry_parm = data->stack_parm = const0_rtx;
2533 return;
2534 }
2535
2536 targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2537 data->arg.type);
2538
2539 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2540 data->arg);
2541 if (entry_parm == 0)
2542 data->arg.mode = data->passed_mode;
2543
2544 /* Determine parm's home in the stack, in case it arrives in the stack
2545 or we should pretend it did. Compute the stack position and rtx where
2546 the argument arrives and its size.
2547
2548 There is one complexity here: If this was a parameter that would
2549 have been passed in registers, but wasn't only because it is
2550 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2551 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2552 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2553 as it was the previous time. */
2554 in_regs = (entry_parm != 0);
2555#ifdef STACK_PARMS_IN_REG_PARM_AREA
2556 in_regs = true;
2557#endif
2558 if (!in_regs && !data->arg.named)
2559 {
2560 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2561 {
2562 rtx tem;
2563 function_arg_info named_arg = data->arg;
2564 named_arg.named = true;
2565 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2566 named_arg);
2567 in_regs = tem != NULL;
2568 }
2569 }
2570
2571 /* If this parameter was passed both in registers and in the stack, use
2572 the copy on the stack. */
2573 if (targetm.calls.must_pass_in_stack (data->arg))
2574 entry_parm = 0;
2575
2576 if (entry_parm)
2577 {
2578 int partial;
2579
2580 partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
2581 data->partial = partial;
2582
2583 /* The caller might already have allocated stack space for the
2584 register parameters. */
2585 if (partial != 0 && all->reg_parm_stack_space == 0)
2586 {
2587 /* Part of this argument is passed in registers and part
2588 is passed on the stack. Ask the prologue code to extend
2589 the stack part so that we can recreate the full value.
2590
2591 PRETEND_BYTES is the size of the registers we need to store.
2592 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2593 stack space that the prologue should allocate.
2594
2595 Internally, gcc assumes that the argument pointer is aligned
2596 to STACK_BOUNDARY bits. This is used both for alignment
2597 optimizations (see init_emit) and to locate arguments that are
2598 aligned to more than PARM_BOUNDARY bits. We must preserve this
2599 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2600 a stack boundary. */
2601
2602 /* We assume at most one partial arg, and it must be the first
2603 argument on the stack. */
2604 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2605
2606 pretend_bytes = partial;
2607 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
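 /* Added example: with partial == 4 and STACK_BYTES == 16, the rounding
    above sets pretend_args_size to 16.  */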
2608
2609 /* We want to align relative to the actual stack pointer, so
2610 don't include this in the stack size until later. */
2611 all->extra_pretend_bytes = all->pretend_args_size;
2612 }
2613 }
2614
2615 locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
2616 all->reg_parm_stack_space,
2617 entry_parm ? data->partial : 0, current_function_decl,
2618 &all->stack_args_size, &data->locate);
2619
2620 /* Update parm_stack_boundary if this parameter is passed in the
2621 stack. */
2622 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2623 crtl->parm_stack_boundary = data->locate.boundary;
2624
2625 /* Adjust offsets to include the pretend args. */
2626 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2627 data->locate.slot_offset.constant += pretend_bytes;
2628 data->locate.offset.constant += pretend_bytes;
2629
2630 data->entry_parm = entry_parm;
2631}
2632
2633/* A subroutine of assign_parms. If there is actually space on the stack
2634 for this parm, count it in stack_args_size and return true. */
2635
2636static bool
2637assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2638 struct assign_parm_data_one *data)
2639{
2640 /* Trivially true if we've no incoming register. */
2641 if (data->entry_parm == NULL)
2642 ;
2643 /* Also true if we're partially in registers and partially not,
2644 since we've arranged to drop the entire argument on the stack. */
2645 else if (data->partial != 0)
2646 ;
2647 /* Also true if the target says that it's passed in both registers
2648 and on the stack. */
2649 else if (GET_CODE (data->entry_parm) == PARALLEL
2650 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2651 ;
2652 /* Also true if the target says that there's stack allocated for
2653 all register parameters. */
2654 else if (all->reg_parm_stack_space > 0)
2655 ;
2656 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2657 else
2658 return false;
2659
2660 all->stack_args_size.constant += data->locate.size.constant;
2661 if (data->locate.size.var)
2662 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2663
2664 return true;
2665}
2666
2667/* A subroutine of assign_parms. Given that this parameter is allocated
2668 stack space by the ABI, find it. */
2669
2670static void
2671assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2672{
2673 rtx offset_rtx, stack_parm;
2674 unsigned int align, boundary;
2675
2676 /* If we're passing this arg using a reg, make its stack home the
2677 aligned stack slot. */
2678 if (data->entry_parm)
2679 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2680 else
2681 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2682
2683 stack_parm = crtl->args.internal_arg_pointer;
2684 if (offset_rtx != const0_rtx)
2685 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2686 stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
2687
2688 if (!data->arg.pass_by_reference)
2689 {
2690 set_mem_attributes (stack_parm, parm, 1);
2691 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2692 while promoted mode's size is needed. */
2693 if (data->arg.mode != BLKmode
2694 && data->arg.mode != DECL_MODE (parm))
2695 {
2696 set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
2697 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2698 {
2699 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2700 data->arg.mode);
2701 if (maybe_ne (offset, 0))
2702 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2703 }
2704 }
2705 }
2706
2707 boundary = data->locate.boundary;
2708 align = BITS_PER_UNIT;
2709
2710 /* If we're padding upward, we know that the alignment of the slot
2711 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2712 intentionally forcing upward padding. Otherwise we have to come
2713 up with a guess at the alignment based on OFFSET_RTX. */
2714 poly_int64 offset;
2715 if (data->locate.where_pad == PAD_NONE || data->entry_parm)
2716 align = boundary;
2717 else if (data->locate.where_pad == PAD_UPWARD)
2718 {
2719 align = boundary;
2720 /* If the argument offset is actually more aligned than the nominal
2721 stack slot boundary, take advantage of that excess alignment.
2722 Don't make any assumptions if STACK_POINTER_OFFSET is in use. */
2723 if (poly_int_rtx_p (offset_rtx, &offset)
2724 && known_eq (STACK_POINTER_OFFSET, 0))
2725 {
2726 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2727 if (offset_align == 0 || offset_align > STACK_BOUNDARY)
2728 offset_align = STACK_BOUNDARY;
2729 align = MAX (align, offset_align);
2730 }
2731 }
2732 else if (poly_int_rtx_p (offset_rtx, &offset))
2733 {
2734 align = least_bit_hwi (boundary);
2735 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2736 if (offset_align != 0)
2737 align = MIN (align, offset_align);
2738 }
2739 set_mem_align (stack_parm, align);
2740
2741 if (data->entry_parm)
2742 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2743
2744 data->stack_parm = stack_parm;
2745}
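/* Added example of the alignment logic above: with PAD_UPWARD padding, a
   zero STACK_POINTER_OFFSET and a constant slot offset of 8 bytes,
   known_alignment yields 8, so offset_align becomes 64 bits (it would be
   capped at STACK_BOUNDARY if larger), and the MEM is marked with the
   larger of that and the parameter boundary.  */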
2746
2747 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2748 always valid and contiguous. */
2749
2750static void
2751assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2752{
2753 rtx entry_parm = data->entry_parm;
2754 rtx stack_parm = data->stack_parm;
2755
2756 /* If this parm was passed part in regs and part in memory, pretend it
2757 arrived entirely in memory by pushing the register-part onto the stack.
2758 In the special case of a DImode or DFmode that is split, we could put
2759 it together in a pseudoreg directly, but for now that's not worth
2760 bothering with. */
2761 if (data->partial != 0)
2762 {
2763 /* Handle calls that pass values in multiple non-contiguous
2764 locations. The Irix 6 ABI has examples of this. */
2765 if (GET_CODE (entry_parm) == PARALLEL)
2766 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2767 data->arg.type, int_size_in_bytes (data->arg.type));
2768 else
2769 {
2770 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2771 move_block_from_reg (REGNO (entry_parm),
2772 validize_mem (copy_rtx (stack_parm)),
2773 data->partial / UNITS_PER_WORD);
2774 }
2775
2776 entry_parm = stack_parm;
2777 }
2778
2779 /* If we didn't decide this parm came in a register, by default it came
2780 on the stack. */
2781 else if (entry_parm == NULL)
2782 entry_parm = stack_parm;
2783
2784 /* When an argument is passed in multiple locations, we can't make use
2785 of this information, but we can save some copying if the whole argument
2786 is passed in a single register. */
2787 else if (GET_CODE (entry_parm) == PARALLEL
2788 && data->nominal_mode != BLKmode
2789 && data->passed_mode != BLKmode)
2790 {
2791 size_t i, len = XVECLEN (entry_parm, 0);
2792
2793 for (i = 0; i < len; i++)
2794 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2795 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2796 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2797 == data->passed_mode)
2798 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2799 {
2800 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2801 break;
2802 }
2803 }
2804
2805 data->entry_parm = entry_parm;
2806}
2807
2808/* A subroutine of assign_parms. Reconstitute any values which were
2809 passed in multiple registers and would fit in a single register. */
2810
2811static void
2812assign_parm_remove_parallels (struct assign_parm_data_one *data)
2813{
2814 rtx entry_parm = data->entry_parm;
2815
2816 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2817 This can be done with register operations rather than on the
2818 stack, even if we will store the reconstituted parameter on the
2819 stack later. */
2820 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2821 {
2822 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2823 emit_group_store (parmreg, entry_parm, data->arg.type,
2824 GET_MODE_SIZE (GET_MODE (entry_parm)));
2825 entry_parm = parmreg;
2826 }
2827
2828 data->entry_parm = entry_parm;
2829}
2830
2831 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2832 always valid and properly aligned. */
2833
2834static void
2835assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2836{
2837 rtx stack_parm = data->stack_parm;
2838
2839 /* If we can't trust the parm stack slot to be aligned enough for its
2840 ultimate type, don't use that slot after entry. We'll make another
2841 stack slot, if we need one. */
2842 if (stack_parm
2843 && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
2844 && ((optab_handler (movmisalign_optab, data->nominal_mode)
2845 != CODE_FOR_nothing)
2846 || targetm.slow_unaligned_access (data->nominal_mode,
2847 MEM_ALIGN (stack_parm))))
2848 || (data->nominal_type
2849 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2850 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2851 stack_parm = NULL;
2852
2853 /* If parm was passed in memory, and we need to convert it on entry,
2854 don't store it back in that same slot. */
2855 else if (data->entry_parm == stack_parm
2856 && data->nominal_mode != BLKmode
2857 && data->nominal_mode != data->passed_mode)
2858 stack_parm = NULL;
2859
2860 /* If stack protection is in effect for this function, don't leave any
2861 pointers in their passed stack slots. */
2862 else if (crtl->stack_protect_guard
2863 && (flag_stack_protect == SPCT_FLAG_ALL
2864 || data->arg.pass_by_reference
2865 || POINTER_TYPE_P (data->nominal_type)))
2866 stack_parm = NULL;
2867
2868 data->stack_parm = stack_parm;
2869}
2870
2871/* A subroutine of assign_parms. Return true if the current parameter
2872 should be stored as a BLKmode in the current frame. */
2873
2874static bool
2875assign_parm_setup_block_p (struct assign_parm_data_one *data)
2876{
2877 if (data->nominal_mode == BLKmode)
2878 return true;
2879 if (GET_MODE (data->entry_parm) == BLKmode)
2880 return true;
2881
2882#ifdef BLOCK_REG_PADDING
2883 /* Only assign_parm_setup_block knows how to deal with register arguments
2884 that are padded at the least significant end. */
2885 if (REG_P (data->entry_parm)
2886 && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
2887 && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
2888 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2889 return true;
2890#endif
2891
2892 return false;
2893}
2894
2895/* A subroutine of assign_parms. Arrange for the parameter to be
2896 present and valid in DATA->STACK_PARM. */
2897
2898static void
2899assign_parm_setup_block (struct assign_parm_data_all *all,
2900 tree parm, struct assign_parm_data_one *data)
2901{
2902 rtx entry_parm = data->entry_parm;
2903 rtx stack_parm = data->stack_parm;
2904 rtx target_reg = NULL_RTX;
2905 bool in_conversion_seq = false;
2906 HOST_WIDE_INT size;
2907 HOST_WIDE_INT size_stored;
2908
2909 if (GET_CODE (entry_parm) == PARALLEL)
2910 entry_parm = emit_group_move_into_temps (entry_parm);
2911
2912 /* If we want the parameter in a pseudo, don't use a stack slot. */
2913 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2914 {
2915 tree def = ssa_default_def (cfun, parm);
2916 gcc_assert (def);
2917 machine_mode mode = promote_ssa_mode (def, NULL);
2918 rtx reg = gen_reg_rtx (mode);
2919 if (GET_CODE (reg) != CONCAT)
2920 stack_parm = reg;
2921 else
2922 {
2923 target_reg = reg;
2924 /* Avoid allocating a stack slot, if there isn't one
2925 preallocated by the ABI. It might seem like we should
2926 always prefer a pseudo, but converting between
2927 floating-point and integer modes goes through the stack
2928 on various machines, so it's better to use the reserved
2929 stack slot than to risk wasting it and allocating more
2930 for the conversion. */
2931 if (stack_parm == NULL_RTX)
2932 {
2933 int save = generating_concat_p;
2934 generating_concat_p = 0;
2935 stack_parm = gen_reg_rtx (mode);
2936 generating_concat_p = save;
2937 }
2938 }
2939 data->stack_parm = NULL;
2940 }
2941
2942 size = int_size_in_bytes (data->arg.type);
2943 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2944 if (stack_parm == 0)
2945 {
2946 HOST_WIDE_INT parm_align
2947 = (STRICT_ALIGNMENT
2948 ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
2949
2950 SET_DECL_ALIGN (parm, parm_align);
2951 if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2952 {
2953 rtx allocsize = gen_int_mode (size_stored, Pmode);
2954 get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2955 stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2956 MAX_SUPPORTED_STACK_ALIGNMENT);
2957 rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2958 DECL_ALIGN (parm));
2959 mark_reg_pointer (addr, DECL_ALIGN (parm));
2960 stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2961 MEM_NOTRAP_P (stack_parm) = 1;
2962 }
2963 else
2964 stack_parm = assign_stack_local (BLKmode, size_stored,
2965 DECL_ALIGN (parm));
2966 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2967 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2968 set_mem_attributes (stack_parm, parm, 1);
2969 }
2970
2971 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2972 calls that pass values in multiple non-contiguous locations. */
2973 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2974 {
2975 rtx mem;
2976
2977 /* Note that we will be storing an integral number of words.
2978 So we have to be careful to ensure that we allocate an
2979 integral number of words. We do this above when we call
2980 assign_stack_local if space was not allocated in the argument
2981 list. If it was, this will not work if PARM_BOUNDARY is not
2982 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2983 if it becomes a problem. Exception is when BLKmode arrives
2984 with arguments not conforming to word_mode. */
2985
2986 if (data->stack_parm == 0)
2987 ;
2988 else if (GET_CODE (entry_parm) == PARALLEL)
2989 ;
2990 else
2991 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2992
2993 mem = validize_mem (copy_rtx (stack_parm));
2994
2995 /* Handle values in multiple non-contiguous locations. */
2996 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2997 emit_group_store (mem, entry_parm, data->arg.type, size);
2998 else if (GET_CODE (entry_parm) == PARALLEL)
2999 {
3000 push_to_sequence2 (all->first_conversion_insn,
3001 all->last_conversion_insn);
3002 emit_group_store (mem, entry_parm, data->arg.type, size);
3003 all->first_conversion_insn = get_insns ();
3004 all->last_conversion_insn = get_last_insn ();
3005 end_sequence ();
3006 in_conversion_seq = true;
3007 }
3008
3009 else if (size == 0)
3010 ;
3011
3012 /* If SIZE is that of a mode no bigger than a word, just use
3013 that mode's store operation. */
3014 else if (size <= UNITS_PER_WORD)
3015 {
3016 unsigned int bits = size * BITS_PER_UNIT;
3017 machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
3018
3019 if (mode != BLKmode
3020#ifdef BLOCK_REG_PADDING
3021 && (size == UNITS_PER_WORD
3022 || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3023 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
3024#endif
3025 )
3026 {
3027 rtx reg;
3028
3029 /* We are really truncating a word_mode value containing
3030 SIZE bytes into a value of mode MODE. If such an
3031 operation requires no actual instructions, we can refer
3032 to the value directly in mode MODE, otherwise we must
3033 start with the register in word_mode and explicitly
3034 convert it. */
3035 if (mode == word_mode
3036 || TRULY_NOOP_TRUNCATION_MODES_P (mode, word_mode))
3037 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3038 else
3039 {
3040 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3041 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3042 }
3043
3044 /* We use adjust_address to get a new MEM with the mode
3045 changed. adjust_address is better than change_address
3046 for this purpose because adjust_address does not lose
3047 the MEM_EXPR associated with the MEM.
3048
3049 If the MEM_EXPR is lost, then optimizations like DSE
3050 assume the MEM escapes and thus is not subject to DSE. */
3051 emit_move_insn (adjust_address (mem, mode, 0), reg);
3052 }
3053
3054#ifdef BLOCK_REG_PADDING
3055 /* Storing the register in memory as a full word, as
3056 move_block_from_reg below would do, and then using the
3057 MEM in a smaller mode, has the effect of shifting right
3058 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3059 shifting must be explicit. */
3060 else if (!MEM_P (mem))
3061 {
3062 rtx x;
3063
3064 /* If the assert below fails, we should have taken the
3065 mode != BLKmode path above, unless we have downward
3066 padding of smaller-than-word arguments on a machine
3067 with little-endian bytes, which would likely require
3068 additional changes to work correctly. */
3069 gcc_checking_assert (BYTES_BIG_ENDIAN
3070 && (BLOCK_REG_PADDING (mode,
3071 data->arg.type, 1)
3072 == PAD_UPWARD));
3073
3074 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3075
3076 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3077 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3078 NULL_RTX, 1);
3079 x = force_reg (word_mode, x);
3080 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3081
3082 emit_move_insn (mem, x);
3083 }
3084#endif
3085
3086 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3087 machine must be aligned to the left before storing
3088 to memory. Note that the previous test doesn't
3089 handle all cases (e.g. SIZE == 3). */
3090 else if (size != UNITS_PER_WORD
3091#ifdef BLOCK_REG_PADDING
3092 && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3093 == PAD_DOWNWARD)
3094#else
3095 && BYTES_BIG_ENDIAN
3096#endif
3097 )
3098 {
3099 rtx tem, x;
3100 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3101 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3102
3103 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3104 tem = change_address (mem, word_mode, 0);
3105 emit_move_insn (tem, x);
3106 }
3107 else
3108 move_block_from_reg (REGNO (entry_parm), mem,
3109 size_stored / UNITS_PER_WORD);
3110 }
3111 else if (!MEM_P (mem))
3112 {
3113 gcc_checking_assert (size > UNITS_PER_WORD);
3114#ifdef BLOCK_REG_PADDING
3115 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3116 data->arg.type, 0)
3117 == PAD_UPWARD);
3118#endif
3119 emit_move_insn (mem, entry_parm);
3120 }
3121 else
3122 move_block_from_reg (REGNO (entry_parm), mem,
3123 size_stored / UNITS_PER_WORD);
3124 }
3125 else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
3126 {
3127 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3128 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3129 BLOCK_OP_NORMAL);
3130 all->first_conversion_insn = get_insns ();
3131 all->last_conversion_insn = get_last_insn ();
3132 end_sequence ();
3133 in_conversion_seq = true;
3134 }
3135
3136 if (target_reg)
3137 {
3138 if (!in_conversion_seq)
3139 emit_move_insn (target_reg, stack_parm);
3140 else
3141 {
3142 push_to_sequence2 (all->first_conversion_insn,
3143 all->last_conversion_insn);
3144 emit_move_insn (target_reg, stack_parm);
3145 all->first_conversion_insn = get_insns ();
3146 all->last_conversion_insn = get_last_insn ();
3147 end_sequence ();
3148 }
3149 stack_parm = target_reg;
3150 }
3151
3152 data->stack_parm = stack_parm;
3153 set_parm_rtl (parm, stack_parm);
3154}
3155
3156/* A subroutine of assign_parms. Allocate a pseudo to hold the current
3157 parameter. Get it there. Perform all ABI specified conversions. */
3158
3159static void
3160assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3161 struct assign_parm_data_one *data)
3162{
3163 rtx parmreg, validated_mem;
3164 rtx equiv_stack_parm;
3165 machine_mode promoted_nominal_mode;
3166 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3167 bool did_conversion = false;
3168 bool need_conversion, moved;
3169 enum insn_code icode;
3170 rtx rtl;
3171
3172 /* Store the parm in a pseudoregister during the function, but we may
3173 need to do it in a wider mode. Using 2 here makes the result
3174 consistent with promote_decl_mode and thus expand_expr_real_1. */
3175 promoted_nominal_mode
3176 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3177 TREE_TYPE (current_function_decl), 2);
3178
3179 parmreg = gen_reg_rtx (promoted_nominal_mode);
3180 if (!DECL_ARTIFICIAL (parm))
3181 mark_user_reg (parmreg);
3182
3183 /* If this was an item that we received a pointer to,
3184 set rtl appropriately. */
3185 if (data->arg.pass_by_reference)
3186 {
3187 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
3188 set_mem_attributes (rtl, parm, 1);
3189 }
3190 else
3191 rtl = parmreg;
3192
3193 assign_parm_remove_parallels (data);
3194
3195 /* Copy the value into the register, thus bridging between
3196 assign_parm_find_data_types and expand_expr_real_1. */
3197
3198 equiv_stack_parm = data->stack_parm;
3199 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3200
3201 need_conversion = (data->nominal_mode != data->passed_mode
3202 || promoted_nominal_mode != data->arg.mode);
3203 moved = false;
3204
3205 if (need_conversion
3206 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3207 && data->nominal_mode == data->passed_mode
3208 && data->nominal_mode == GET_MODE (data->entry_parm))
3209 {
3210 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3211 mode, by the caller. We now have to convert it to
3212 NOMINAL_MODE, if different. However, PARMREG may be in
3213 a different mode than NOMINAL_MODE if it is being stored
3214 promoted.
3215
3216 If ENTRY_PARM is a hard register, it might be in a register
3217 not valid for operating in its mode (e.g., an odd-numbered
3218 register for a DFmode). In that case, moves are the only
3219 thing valid, so we can't do a convert from there. This
3220 occurs when the calling sequence allows such misaligned
3221 usages.
3222
3223 In addition, the conversion may involve a call, which could
3224 clobber parameters which haven't been copied to pseudo
3225 registers yet.
3226
3227 First, we try to emit an insn which performs the necessary
3228 conversion. We verify that this insn does not clobber any
3229 hard registers. */
3230
3231 rtx op0, op1;
3232
3233 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3234 unsignedp);
3235
3236 op0 = parmreg;
3237 op1 = validated_mem;
3238 if (icode != CODE_FOR_nothing
3239 && insn_operand_matches (icode, 0, op0)
3240 && insn_operand_matches (icode, 1, op1))
3241 {
3242 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3243 rtx_insn *insn, *insns;
3244 rtx t = op1;
3245 HARD_REG_SET hardregs;
3246
3247 start_sequence ();
3248 /* If op1 is a hard register that is likely spilled, first
3249 force it into a pseudo, otherwise combiner might extend
3250 its lifetime too much. */
3251 if (GET_CODE (t) == SUBREG)
3252 t = SUBREG_REG (t);
3253 if (REG_P (t)
3254 && HARD_REGISTER_P (t)
3255 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3256 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3257 {
3258 t = gen_reg_rtx (GET_MODE (op1));
3259 emit_move_insn (t, op1);
3260 }
3261 else
3262 t = op1;
3263 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3264 data->passed_mode, unsignedp);
3265 emit_insn (pat);
3266 insns = get_insns ();
3267
3268 moved = true;
3269 CLEAR_HARD_REG_SET (hardregs);
3270 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3271 {
3272 if (INSN_P (insn))
3273 note_stores (insn, record_hard_reg_sets, &hardregs);
3274 if (!hard_reg_set_empty_p (hardregs))
3275 moved = false;
3276 }
3277
3278 end_sequence ();
3279
3280 if (moved)
3281 {
3282 emit_insn (insns);
3283 if (equiv_stack_parm != NULL_RTX)
3284 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3285 equiv_stack_parm);
3286 }
3287 }
3288 }
3289
3290 if (moved)
3291 /* Nothing to do. */
3292 ;
3293 else if (need_conversion)
3294 {
3295 /* We did not have an insn to convert directly, or the sequence
3296 generated appeared unsafe. We must first copy the parm to a
3297 pseudo reg, and save the conversion until after all
3298 parameters have been moved. */
3299
3300 int save_tree_used;
3301 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3302
3303 emit_move_insn (tempreg, validated_mem);
3304
3305 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3306 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3307
3308 if (partial_subreg_p (tempreg)
3309 && GET_MODE (tempreg) == data->nominal_mode
3310 && REG_P (SUBREG_REG (tempreg))
3311 && data->nominal_mode == data->passed_mode
3312 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3313 {
3314 /* The argument is already sign/zero extended, so note it
3315 into the subreg. */
3316 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3317 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3318 }
3319
3320 /* TREE_USED gets set erroneously during expand_assignment. */
3321 save_tree_used = TREE_USED (parm);
3322 SET_DECL_RTL (parm, rtl);
3323 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3324 SET_DECL_RTL (parm, NULL_RTX);
3325 TREE_USED (parm) = save_tree_used;
3326 all->first_conversion_insn = get_insns ();
3327 all->last_conversion_insn = get_last_insn ();
3328 end_sequence ();
3329
3330 did_conversion = true;
3331 }
3332 else if (MEM_P (data->entry_parm)
3333 && GET_MODE_ALIGNMENT (promoted_nominal_mode)
3334 > MEM_ALIGN (data->entry_parm)
3335 && (((icode = optab_handler (movmisalign_optab,
3336 promoted_nominal_mode))
3337 != CODE_FOR_nothing)
3338 || targetm.slow_unaligned_access (promoted_nominal_mode,
3339 MEM_ALIGN (data->entry_parm))))
3340 {
3341 if (icode != CODE_FOR_nothing)
3342 emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
3343 else
3344 rtl = parmreg = extract_bit_field (validated_mem,
3345 GET_MODE_BITSIZE (promoted_nominal_mode), 0,
3346 unsignedp, parmreg,
3347 promoted_nominal_mode, VOIDmode, false, NULL);
3348 }
3349 else
3350 emit_move_insn (parmreg, validated_mem);
3351
3352 /* If we were passed a pointer but the actual value can live in a register,
3353 retrieve it and use it directly. Note that we cannot use nominal_mode,
3354 because it will have been set to Pmode above; we must use the actual mode
3355 of the parameter instead. */
3356 if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3357 {
3358 /* Use a stack slot for debugging purposes if possible. */
3359 if (use_register_for_decl (parm))
3360 {
3361 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3362 mark_user_reg (parmreg);
3363 }
3364 else
3365 {
3366 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3367 TYPE_MODE (TREE_TYPE (parm)),
3368 TYPE_ALIGN (TREE_TYPE (parm)));
3369 parmreg
3370 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3371 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3372 align);
3373 set_mem_attributes (parmreg, parm, 1);
3374 }
3375
3376 /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3377 the debug info in case it is not legitimate. */
3378 if (GET_MODE (parmreg) != GET_MODE (rtl))
3379 {
3380 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3381 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3382
3383 push_to_sequence2 (all->first_conversion_insn,
3384 all->last_conversion_insn);
3385 emit_move_insn (tempreg, rtl);
3386 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3387 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3388 tempreg);
3389 all->first_conversion_insn = get_insns ();
3390 all->last_conversion_insn = get_last_insn ();
3391 end_sequence ();
3392
3393 did_conversion = true;
3394 }
3395 else
3396 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3397
3398 rtl = parmreg;
3399
3400 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3401 now the parm. */
3402 data->stack_parm = NULL;
3403 }
3404
3405 set_parm_rtl (parm, rtl);
3406
3407 /* Mark the register as eliminable if we did no conversion and it was
3408 copied from memory at a fixed offset, and the arg pointer was not
3409 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3410 offset formed an invalid address, such memory-equivalences as we
3411 make here would screw up life analysis for it. */
3412 if (data->nominal_mode == data->passed_mode
3413 && !did_conversion
3414 && data->stack_parm != 0
3415 && MEM_P (data->stack_parm)
3416 && data->locate.offset.var == 0
3417 && reg_mentioned_p (virtual_incoming_args_rtx,
3418 XEXP (data->stack_parm, 0)))
3419 {
3420 rtx_insn *linsn = get_last_insn ();
3421 rtx_insn *sinsn;
3422 rtx set;
3423
3424 /* Mark complex types separately. */
3425 if (GET_CODE (parmreg) == CONCAT)
3426 {
3427 scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3428 int regnor = REGNO (XEXP (parmreg, 0));
3429 int regnoi = REGNO (XEXP (parmreg, 1));
3430 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3431 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3432 GET_MODE_SIZE (submode));
3433
3434 /* Scan backwards for the set of the real and
3435 imaginary parts. */
3436 for (sinsn = linsn; sinsn != 0;
3437 sinsn = prev_nonnote_insn (sinsn))
3438 {
3439	      set = single_set (sinsn);
3440 if (set == 0)
3441 continue;
3442
3443 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3444 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3445 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3446 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3447 }
3448 }
3449 else
3450 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3451 }
3452
3453 /* For pointer data type, suggest pointer register. */
3454 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3455 mark_reg_pointer (parmreg,
3456 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3457}
3458
3459/* A subroutine of assign_parms. Allocate stack space to hold the current
3460 parameter. Get it there. Perform all ABI specified conversions. */
3461
3462static void
3463assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3464 struct assign_parm_data_one *data)
3465{
3466 /* Value must be stored in the stack slot STACK_PARM during function
3467 execution. */
3468 bool to_conversion = false;
3469
3470 assign_parm_remove_parallels (data);
3471
3472 if (data->arg.mode != data->nominal_mode)
3473 {
3474 /* Conversion is required. */
3475 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3476
3477 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3478
3479 /* Some ABIs require scalar floating point modes to be passed
3480 in a wider scalar integer mode. We need to explicitly
3481 truncate to an integer mode of the correct precision before
3482 using a SUBREG to reinterpret as a floating point value. */
3483 if (SCALAR_FLOAT_MODE_P (data->nominal_mode)
3484 && SCALAR_INT_MODE_P (data->arg.mode)
3485 && known_lt (GET_MODE_SIZE (data->nominal_mode),
3486 GET_MODE_SIZE (data->arg.mode)))
3487	tempreg = convert_wider_int_to_float (data->nominal_mode,
3488					      data->arg.mode, tempreg);
3489
3490 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3491 to_conversion = true;
3492
3493 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3494 TYPE_UNSIGNED (TREE_TYPE (parm)));
3495
3496 if (data->stack_parm)
3497 {
3498 poly_int64 offset
3499	    = subreg_lowpart_offset (data->nominal_mode,
3500 GET_MODE (data->stack_parm));
3501 /* ??? This may need a big-endian conversion on sparc64. */
3502 data->stack_parm
3503 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3504	  if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3505 set_mem_offset (data->stack_parm,
3506 MEM_OFFSET (data->stack_parm) + offset);
3507 }
3508 }
3509
3510 if (data->entry_parm != data->stack_parm)
3511 {
3512 rtx src, dest;
3513
3514 if (data->stack_parm == 0)
3515 {
3516 int align = STACK_SLOT_ALIGNMENT (data->arg.type,
3517 GET_MODE (data->entry_parm),
3518 TYPE_ALIGN (data->arg.type));
3519 if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
3520	      && ((optab_handler (movmisalign_optab,
3521 GET_MODE (data->entry_parm))
3522 != CODE_FOR_nothing)
3523 || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
3524 align)))
3525 align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
3526 data->stack_parm
3527 = assign_stack_local (GET_MODE (data->entry_parm),
3528				   GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3529 align);
3530 align = MEM_ALIGN (data->stack_parm);
3531 set_mem_attributes (data->stack_parm, parm, 1);
3532 set_mem_align (data->stack_parm, align);
3533 }
3534
3535 dest = validize_mem (copy_rtx (data->stack_parm));
3536 src = validize_mem (copy_rtx (data->entry_parm));
3537
3538 if (TYPE_EMPTY_P (data->arg.type))
3539 /* Empty types don't really need to be copied. */;
3540 else if (MEM_P (src))
3541 {
3542 /* Use a block move to handle potentially misaligned entry_parm. */
3543 if (!to_conversion)
3544 push_to_sequence2 (all->first_conversion_insn,
3545 all->last_conversion_insn);
3546 to_conversion = true;
3547
3548 emit_block_move (dest, src,
3549 GEN_INT (int_size_in_bytes (data->arg.type)),
3550 BLOCK_OP_NORMAL);
3551 }
3552 else
3553 {
3554 if (!REG_P (src))
3555 src = force_reg (GET_MODE (src), src);
3556 emit_move_insn (dest, src);
3557 }
3558 }
3559
3560 if (to_conversion)
3561 {
3562 all->first_conversion_insn = get_insns ();
3563 all->last_conversion_insn = get_last_insn ();
3564 end_sequence ();
3565 }
3566
3567 set_parm_rtl (parm, data->stack_parm);
3568}
3569
3570/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3571 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3572
3573static void
3574assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3575 vec<tree> fnargs)
3576{
3577 tree parm;
3578 tree orig_fnargs = all->orig_fnargs;
3579 unsigned i = 0;
3580
3581 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3582 {
3583 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3584 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3585 {
3586 rtx tmp, real, imag;
3587 scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3588
3589 real = DECL_RTL (fnargs[i]);
3590 imag = DECL_RTL (fnargs[i + 1]);
3591 if (inner != GET_MODE (real))
3592 {
3593 real = gen_lowpart_SUBREG (inner, real);
3594 imag = gen_lowpart_SUBREG (inner, imag);
3595 }
3596
3597 if (TREE_ADDRESSABLE (parm))
3598 {
3599 rtx rmem, imem;
3600 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3601 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3602 DECL_MODE (parm),
3603 TYPE_ALIGN (TREE_TYPE (parm)));
3604
3605 /* split_complex_arg put the real and imag parts in
3606 pseudos. Move them to memory. */
3607 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3608 set_mem_attributes (tmp, parm, 1);
3609 rmem = adjust_address_nv (tmp, inner, 0);
3610 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3611 push_to_sequence2 (all->first_conversion_insn,
3612 all->last_conversion_insn);
3613 emit_move_insn (rmem, real);
3614 emit_move_insn (imem, imag);
3615 all->first_conversion_insn = get_insns ();
3616 all->last_conversion_insn = get_last_insn ();
3617 end_sequence ();
3618 }
3619 else
3620 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3621 set_parm_rtl (parm, tmp);
3622
3623 real = DECL_INCOMING_RTL (fnargs[i]);
3624 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3625 if (inner != GET_MODE (real))
3626 {
3627 real = gen_lowpart_SUBREG (inner, real);
3628 imag = gen_lowpart_SUBREG (inner, imag);
3629 }
3630 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3631 set_decl_incoming_rtl (parm, tmp, false);
3632 i++;
3633 }
3634 }
3635}
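
/* Illustration only, added for exposition (not from the GCC sources):
   when the target's split_complex_arg hook is set, a parameter such as

     double cnorm (_Complex double z);

   is handled during parameter assignment as if the declaration had been

     double cnorm (double z_realpart, double z_imagpart);

   and the code above glues the two halves back together into a CONCAT
   (or into one stack temporary when Z is addressable), so that the rest
   of the compiler again sees a single complex parameter.  The names
   cnorm, z_realpart and z_imagpart are invented for the example.  */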
3636
3637/* Assign RTL expressions to the function's parameters. This may involve
3638 copying them into registers and using those registers as the DECL_RTL. */
3639
3640static void
3641assign_parms (tree fndecl)
3642{
3643 struct assign_parm_data_all all;
3644 tree parm;
3645 vec<tree> fnargs;
3646 unsigned i;
3647
3648 crtl->args.internal_arg_pointer
3649 = targetm.calls.internal_arg_pointer ();
3650
3651  assign_parms_initialize_all (&all);
3652  fnargs = assign_parms_augmented_arg_list (&all);
3653
3654 if (TYPE_NO_NAMED_ARGS_STDARG_P (TREE_TYPE (fndecl))
3655 && fnargs.is_empty ())
3656 {
3657 struct assign_parm_data_one data = {};
3658      assign_parms_setup_varargs (&all, &data, false);
3659 }
3660
3661 FOR_EACH_VEC_ELT (fnargs, i, parm)
3662 {
3663 struct assign_parm_data_one data;
3664
3665 /* Extract the type of PARM; adjust it according to ABI. */
3666      assign_parm_find_data_types (&all, parm, &data);
3667
3668 /* Early out for errors and void parameters. */
3669 if (data.passed_mode == VOIDmode)
3670 {
3671 SET_DECL_RTL (parm, const0_rtx);
3672 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3673 continue;
3674 }
3675
3676 /* Estimate stack alignment from parameter alignment. */
3677 if (SUPPORTS_STACK_ALIGNMENT)
3678 {
3679 unsigned int align
3680 = targetm.calls.function_arg_boundary (data.arg.mode,
3681 data.arg.type);
3682 align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
3683 if (TYPE_ALIGN (data.nominal_type) > align)
3684 align = MINIMUM_ALIGNMENT (data.nominal_type,
3685 TYPE_MODE (data.nominal_type),
3686 TYPE_ALIGN (data.nominal_type));
3687 if (crtl->stack_alignment_estimated < align)
3688 {
3689 gcc_assert (!crtl->stack_realign_processed);
3690 crtl->stack_alignment_estimated = align;
3691 }
3692 }
3693
3694 /* Find out where the parameter arrives in this function. */
3695      assign_parm_find_entry_rtl (&all, &data);
3696
3697 /* Find out where stack space for this parameter might be. */
3698      if (assign_parm_is_stack_parm (&all, &data))
3699	{
3700	  assign_parm_find_stack_rtl (parm, &data);
3701	  assign_parm_adjust_entry_rtl (&data);
3702	  /* For arguments that occupy no space in the parameter
3703	     passing area, have non-zero size and have their address
3704	     taken, force creation of a stack slot so that they have
3705	     a distinct address from other parameters.  */
3706 if (TYPE_EMPTY_P (data.arg.type)
3707 && TREE_ADDRESSABLE (parm)
3708 && data.entry_parm == data.stack_parm
3709 && MEM_P (data.entry_parm)
3710 && int_size_in_bytes (data.arg.type))
3711 data.stack_parm = NULL_RTX;
3712 }
3713 /* Record permanently how this parm was passed. */
3714 if (data.arg.pass_by_reference)
3715 {
3716 rtx incoming_rtl
3717 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
3718 data.entry_parm);
3719 set_decl_incoming_rtl (parm, incoming_rtl, true);
3720 }
3721 else
3722 set_decl_incoming_rtl (parm, data.entry_parm, false);
3723
3724      assign_parm_adjust_stack_rtl (&data);
3725
3726      if (assign_parm_setup_block_p (&data))
3727	assign_parm_setup_block (&all, parm, &data);
3728      else if (data.arg.pass_by_reference || use_register_for_decl (parm))
3729	assign_parm_setup_reg (&all, parm, &data);
3730      else
3731	assign_parm_setup_stack (&all, parm, &data);
3732
3733      if (cfun->stdarg && !DECL_CHAIN (parm))
3734	assign_parms_setup_varargs (&all, &data, false);
3735
3736 /* Update info on where next arg arrives in registers. */
3737 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3738 }
3739
3740 if (targetm.calls.split_complex_arg)
3741    assign_parms_unsplit_complex (&all, fnargs);
3742
3743 fnargs.release ();
3744
3745 /* Output all parameter conversion instructions (possibly including calls)
3746 now that all parameters have been copied out of hard registers. */
3747 emit_insn (all.first_conversion_insn);
3748
3749 /* Estimate reload stack alignment from scalar return mode. */
3750 if (SUPPORTS_STACK_ALIGNMENT)
3751 {
3752 if (DECL_RESULT (fndecl))
3753 {
3754 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3755 machine_mode mode = TYPE_MODE (type);
3756
3757 if (mode != BLKmode
3758 && mode != VOIDmode
3759 && !AGGREGATE_TYPE_P (type))
3760 {
3761 unsigned int align = GET_MODE_ALIGNMENT (mode);
3762 if (crtl->stack_alignment_estimated < align)
3763 {
3764 gcc_assert (!crtl->stack_realign_processed);
3765 crtl->stack_alignment_estimated = align;
3766 }
3767 }
3768 }
3769 }
3770
3771 /* If we are receiving a struct value address as the first argument, set up
3772 the RTL for the function result. As this might require code to convert
3773 the transmitted address to Pmode, we do this here to ensure that possible
3774 preliminary conversions of the address have been emitted already. */
3775 if (all.function_result_decl)
3776 {
3777 tree result = DECL_RESULT (current_function_decl);
3778 rtx addr = DECL_RTL (all.function_result_decl);
3779 rtx x;
3780
3781 if (DECL_BY_REFERENCE (result))
3782 {
3783 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3784 x = addr;
3785 }
3786 else
3787 {
3788 SET_DECL_VALUE_EXPR (result,
3789 build1 (INDIRECT_REF, TREE_TYPE (result),
3790 all.function_result_decl));
3791 addr = convert_memory_address (Pmode, addr);
3792 x = gen_rtx_MEM (DECL_MODE (result), addr);
3793 set_mem_attributes (x, result, 1);
3794 }
3795
3796 DECL_HAS_VALUE_EXPR_P (result) = 1;
3797
3798 set_parm_rtl (result, x);
3799 }
3800
3801 /* We have aligned all the args, so add space for the pretend args. */
3802 crtl->args.pretend_args_size = all.pretend_args_size;
3803 all.stack_args_size.constant += all.extra_pretend_bytes;
3804 crtl->args.size = all.stack_args_size.constant;
3805
3806 /* Adjust function incoming argument size for alignment and
3807 minimum length. */
3808
3809  crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3810 crtl->args.size = aligned_upper_bound (crtl->args.size,
3811 PARM_BOUNDARY / BITS_PER_UNIT);
3812
3813 if (ARGS_GROW_DOWNWARD)
3814 {
3815 crtl->args.arg_offset_rtx
3816 = (all.stack_args_size.var == 0
3817 ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3818 : expand_expr (size_diffop (all.stack_args_size.var,
3819 size_int (-all.stack_args_size.constant)),
3820			 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3821 }
3822 else
3823 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3824
3825 /* See how many bytes, if any, of its args a function should try to pop
3826 on return. */
3827
3828 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3829 TREE_TYPE (fndecl),
3830 crtl->args.size);
3831
3832 /* For stdarg.h function, save info about
3833 regs and stack space used by the named args. */
3834
3835 crtl->args.info = all.args_so_far_v;
3836
3837 /* Set the rtx used for the function return value. Put this in its
3838 own variable so any optimizers that need this information don't have
3839 to include tree.h. Do this here so it gets done when an inlined
3840 function gets output. */
3841
3842 crtl->return_rtx
3843 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3844 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3845
3846 /* If scalar return value was computed in a pseudo-reg, or was a named
3847 return value that got dumped to the stack, copy that to the hard
3848 return register. */
3849 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3850 {
3851 tree decl_result = DECL_RESULT (fndecl);
3852 rtx decl_rtl = DECL_RTL (decl_result);
3853
3854 if (REG_P (decl_rtl)
3855 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3856 : DECL_REGISTER (decl_result))
3857 {
3858 rtx real_decl_rtl;
3859
3860 /* Unless the psABI says not to. */
3861 if (TYPE_EMPTY_P (TREE_TYPE (decl_result)))
3862 real_decl_rtl = NULL_RTX;
3863 else
3864 {
3865 real_decl_rtl
3866 = targetm.calls.function_value (TREE_TYPE (decl_result),
3867 fndecl, true);
3868 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3869 }
3870 /* The delay slot scheduler assumes that crtl->return_rtx
3871 holds the hard register containing the return value, not a
3872 temporary pseudo. */
3873 crtl->return_rtx = real_decl_rtl;
3874 }
3875 }
3876}
3877
3878/* Gimplify the parameter list for current_function_decl. This involves
3879 evaluating SAVE_EXPRs of variable sized parameters and generating code
3880 to implement callee-copies reference parameters. Returns a sequence of
3881 statements to add to the beginning of the function. */
3882
3883gimple_seq
3884gimplify_parameters (gimple_seq *cleanup)
3885{
3886 struct assign_parm_data_all all;
3887 tree parm;
3888 gimple_seq stmts = NULL;
3889 vec<tree> fnargs;
3890 unsigned i;
3891
3892  assign_parms_initialize_all (&all);
3893  fnargs = assign_parms_augmented_arg_list (&all);
3894
3895 FOR_EACH_VEC_ELT (fnargs, i, parm)
3896 {
3897 struct assign_parm_data_one data;
3898
3899 /* Extract the type of PARM; adjust it according to ABI. */
3900      assign_parm_find_data_types (&all, parm, &data);
3901
3902 /* Early out for errors and void parameters. */
3903 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3904 continue;
3905
3906 /* Update info on where next arg arrives in registers. */
3907 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3908
3909 /* ??? Once upon a time variable_size stuffed parameter list
3910 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3911 turned out to be less than manageable in the gimple world.
3912 Now we have to hunt them down ourselves. */
3913 gimplify_type_sizes (TREE_TYPE (parm), &stmts);
3914
3915 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3916 {
3917 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3918 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3919 }
3920
3921 if (data.arg.pass_by_reference)
3922 {
3923 tree type = TREE_TYPE (data.arg.type);
3924 function_arg_info orig_arg (type, data.arg.named);
3925 if (reference_callee_copied (&all.args_so_far_v, orig_arg))
3926 {
3927 tree local, t;
3928
3929 /* For constant-sized objects, this is trivial; for
3930 variable-sized objects, we have to play games. */
3931 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3932 && !(flag_stack_check == GENERIC_STACK_CHECK
3933 && compare_tree_int (DECL_SIZE_UNIT (parm),
3934 STACK_CHECK_MAX_VAR_SIZE) > 0))
3935 {
3936 local = create_tmp_var (type, get_name (parm));
3937 DECL_IGNORED_P (local) = 0;
3938 /* If PARM was addressable, move that flag over
3939 to the local copy, as its address will be taken,
3940		 not the PARM's.  Keep the PARM marked address-taken,
3941		 as we'll query that flag during gimplification.  */
3942 if (TREE_ADDRESSABLE (parm))
3943 TREE_ADDRESSABLE (local) = 1;
3944 if (DECL_NOT_GIMPLE_REG_P (parm))
3945 DECL_NOT_GIMPLE_REG_P (local) = 1;
3946
3947 if (!is_gimple_reg (local)
3948 && flag_stack_reuse != SR_NONE)
3949 {
3950 tree clobber = build_clobber (type);
3951 gimple *clobber_stmt;
3952 clobber_stmt = gimple_build_assign (local, clobber);
3953 gimple_seq_add_stmt (cleanup, clobber_stmt);
3954 }
3955 }
3956 else
3957 {
3958 tree ptr_type, addr;
3959
3960 ptr_type = build_pointer_type (type);
3961 addr = create_tmp_reg (ptr_type, get_name (parm));
3962 DECL_IGNORED_P (addr) = 0;
3963 local = build_fold_indirect_ref (addr);
3964
3965 t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
3966 DECL_ALIGN (parm),
3967 max_int_size_in_bytes (type));
3968 /* The call has been built for a variable-sized object. */
3969 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3970 t = fold_convert (ptr_type, t);
3971 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3972 gimplify_and_add (t, &stmts);
3973 }
3974
3975 gimplify_assign (local, parm, &stmts);
3976
3977 SET_DECL_VALUE_EXPR (parm, local);
3978 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3979 }
3980 }
3981 }
3982
3983 fnargs.release ();
3984
3985 return stmts;
3986}
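
/* Illustration only, added for exposition (not from the GCC sources):
   for a callee-copied reference parameter of constant size, the loop
   above behaves as if the user's

     void f (struct big s) { use (&s); }

   had been written

     void f (struct big *s_arg)
     {
       struct big s_local = *s_arg;     -- emitted via gimplify_assign
       use (&s_local);                  -- DECL_VALUE_EXPR redirects S here
     }

   with an alloca-based copy used instead when the type has variable
   size (or is too large for generic stack checking).  struct big, f,
   use, s_arg and s_local are invented names.  */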
3987
3988/* Compute the size and offset from the start of the stacked arguments for a
3989 parm passed in mode PASSED_MODE and with type TYPE.
3990
3991 INITIAL_OFFSET_PTR points to the current offset into the stacked
3992 arguments.
3993
3994 The starting offset and size for this parm are returned in
3995 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3996   nonzero, the offset is that of the stack slot, which is returned in
3997 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3998 padding required from the initial offset ptr to the stack slot.
3999
4000 IN_REGS is nonzero if the argument will be passed in registers. It will
4001 never be set if REG_PARM_STACK_SPACE is not defined.
4002
4003 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4004 for arguments which are passed in registers.
4005
4006 FNDECL is the function in which the argument was defined.
4007
4008 There are two types of rounding that are done. The first, controlled by
4009 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4010 argument list to be aligned to the specific boundary (in bits). This
4011 rounding affects the initial and starting offsets, but not the argument
4012 size.
4013
4014 The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4015 optionally rounds the size of the parm to PARM_BOUNDARY. The
4016 initial offset is not affected by this rounding, while the size always
4017 is and the starting offset may be. */
4018
4019/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
4020 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4021 callers pass in the total size of args so far as
4022 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
4023
4024void
4025locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4026 int reg_parm_stack_space, int partial,
4027 tree fndecl ATTRIBUTE_UNUSED,
4028 struct args_size *initial_offset_ptr,
4029 struct locate_and_pad_arg_data *locate)
4030{
4031 tree sizetree;
4032 pad_direction where_pad;
4033 unsigned int boundary, round_boundary;
4034 int part_size_in_regs;
4035
4036 /* If we have found a stack parm before we reach the end of the
4037 area reserved for registers, skip that area. */
4038 if (! in_regs)
4039 {
4040 if (reg_parm_stack_space > 0)
4041 {
4042 if (initial_offset_ptr->var
4043	      || !ordered_p (initial_offset_ptr->constant,
4044			     reg_parm_stack_space))
4045 {
4046 initial_offset_ptr->var
4047 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4048 ssize_int (reg_parm_stack_space));
4049 initial_offset_ptr->constant = 0;
4050 }
4051 else
4052 initial_offset_ptr->constant
4053	      = ordered_max (initial_offset_ptr->constant,
4054			     reg_parm_stack_space);
4055 }
4056 }
4057
4058 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4059
4060 sizetree = (type
4061 ? arg_size_in_bytes (type)
4062 : size_int (GET_MODE_SIZE (passed_mode)));
4063 where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4064 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4065 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4066 type);
4067 locate->where_pad = where_pad;
4068
4069 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4070 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4071 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4072
4073 locate->boundary = boundary;
4074
4075 if (SUPPORTS_STACK_ALIGNMENT)
4076 {
4077 /* stack_alignment_estimated can't change after stack has been
4078 realigned. */
4079 if (crtl->stack_alignment_estimated < boundary)
4080 {
4081 if (!crtl->stack_realign_processed)
4082 crtl->stack_alignment_estimated = boundary;
4083 else
4084 {
4085 /* If stack is realigned and stack alignment value
4086 hasn't been finalized, it is OK not to increase
4087 stack_alignment_estimated. The bigger alignment
4088 requirement is recorded in stack_alignment_needed
4089 below. */
4090 gcc_assert (!crtl->stack_realign_finalized
4091 && crtl->stack_realign_needed);
4092 }
4093 }
4094 }
4095
4096 if (ARGS_GROW_DOWNWARD)
4097 {
4098 locate->slot_offset.constant = -initial_offset_ptr->constant;
4099 if (initial_offset_ptr->var)
4100 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4101 initial_offset_ptr->var);
4102
4103 {
4104 tree s2 = sizetree;
4105 if (where_pad != PAD_NONE
4106 && (!tree_fits_uhwi_p (sizetree)
4107 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4108 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4109 SUB_PARM_SIZE (locate->slot_offset, s2);
4110 }
4111
4112 locate->slot_offset.constant += part_size_in_regs;
4113
4114 if (!in_regs || reg_parm_stack_space > 0)
4115 pad_to_arg_alignment (&locate->slot_offset, boundary,
4116 &locate->alignment_pad);
4117
4118 locate->size.constant = (-initial_offset_ptr->constant
4119 - locate->slot_offset.constant);
4120 if (initial_offset_ptr->var)
4121 locate->size.var = size_binop (MINUS_EXPR,
4122 size_binop (MINUS_EXPR,
4123 ssize_int (0),
4124 initial_offset_ptr->var),
4125 locate->slot_offset.var);
4126
4127 /* Pad_below needs the pre-rounded size to know how much to pad
4128 below. */
4129 locate->offset = locate->slot_offset;
4130 if (where_pad == PAD_DOWNWARD)
4131 pad_below (&locate->offset, passed_mode, sizetree);
4132
4133 }
4134 else
4135 {
4136 if (!in_regs || reg_parm_stack_space > 0)
4137 pad_to_arg_alignment (initial_offset_ptr, boundary,
4138 &locate->alignment_pad);
4139 locate->slot_offset = *initial_offset_ptr;
4140
4141#ifdef PUSH_ROUNDING
4142 if (passed_mode != BLKmode)
4143 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4144#endif
4145
4146 /* Pad_below needs the pre-rounded size to know how much to pad below
4147 so this must be done before rounding up. */
4148 locate->offset = locate->slot_offset;
4149 if (where_pad == PAD_DOWNWARD)
4150 pad_below (&locate->offset, passed_mode, sizetree);
4151
4152 if (where_pad != PAD_NONE
4153 && (!tree_fits_uhwi_p (sizetree)
4154 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4155 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4156
4157 ADD_PARM_SIZE (locate->size, sizetree);
4158
4159 locate->size.constant -= part_size_in_regs;
4160 }
4161
4162 locate->offset.constant
4163 += targetm.calls.function_arg_offset (passed_mode, type);
4164}
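
/* Illustration only, added for exposition (not from the GCC sources):
   a minimal, self-contained sketch of the two kinds of rounding
   described in the comment before locate_and_pad_parm, assuming
   arguments grow upward, a constant offset, a 64-bit PARM_BOUNDARY and
   a hypothetical 12-byte argument whose ABI boundary is 128 bits.  The
   real code also handles variable offsets, padding direction, partial
   registers and args-grow-downward targets.  */
#if 0
#include <stdio.h>

static long
round_up_to (long value, long align)	/* ALIGN must be a power of two.  */
{
  return (value + align - 1) & -align;
}

int
main (void)
{
  long initial_offset = 20;		/* bytes of arguments laid out so far */
  long arg_size = 12;			/* raw size of this argument */
  long arg_boundary = 128 / 8;		/* TARGET_FUNCTION_ARG_BOUNDARY, bytes */
  long round_boundary = 64 / 8;		/* PARM_BOUNDARY by default, bytes */

  /* First rounding: align the slot offset to the argument's boundary;
     this does not change the argument's size.  */
  long slot_offset = round_up_to (initial_offset, arg_boundary);

  /* Second rounding: round the size occupied on the stack up to the
     round boundary, so the next argument starts on a parm boundary.  */
  long slot_size = round_up_to (arg_size, round_boundary);

  printf ("offset %ld, size %ld, next offset %ld\n",
	  slot_offset, slot_size, slot_offset + slot_size);  /* 32, 16, 48 */
  return 0;
}
#endif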
4165
4166/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4167 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4168
4169static void
4170pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4171 struct args_size *alignment_pad)
4172{
4173 tree save_var = NULL_TREE;
4174 poly_int64 save_constant = 0;
4175 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4176 poly_int64 sp_offset = STACK_POINTER_OFFSET;
4177
4178#ifdef SPARC_STACK_BOUNDARY_HACK
4179 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4180 the real alignment of %sp. However, when it does this, the
4181 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4182 if (SPARC_STACK_BOUNDARY_HACK)
4183 sp_offset = 0;
4184#endif
4185
4186 if (boundary > PARM_BOUNDARY)
4187 {
4188 save_var = offset_ptr->var;
4189 save_constant = offset_ptr->constant;
4190 }
4191
4192 alignment_pad->var = NULL_TREE;
4193 alignment_pad->constant = 0;
4194
4195 if (boundary > BITS_PER_UNIT)
4196 {
4197 int misalign;
4198 if (offset_ptr->var
4199	  || !known_misalignment (offset_ptr->constant + sp_offset,
4200				  boundary_in_bytes, &misalign))
4201 {
4202 tree sp_offset_tree = ssize_int (sp_offset);
4203 tree offset = size_binop (PLUS_EXPR,
4204 ARGS_SIZE_TREE (*offset_ptr),
4205 sp_offset_tree);
4206 tree rounded;
4207 if (ARGS_GROW_DOWNWARD)
4208 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4209 else
4210 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4211
4212 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4213 /* ARGS_SIZE_TREE includes constant term. */
4214 offset_ptr->constant = 0;
4215 if (boundary > PARM_BOUNDARY)
4216 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4217 save_var);
4218 }
4219 else
4220 {
4221 if (ARGS_GROW_DOWNWARD)
4222 offset_ptr->constant -= misalign;
4223 else
4224 offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4225
4226 if (boundary > PARM_BOUNDARY)
4227 alignment_pad->constant = offset_ptr->constant - save_constant;
4228 }
4229 }
4230}
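
/* Illustration only, added for exposition (not from the GCC sources):
   pad_to_arg_alignment aligns the *effective* address, i.e. it is
   sp + STACK_POINTER_OFFSET + offset that ends up on the boundary, not
   the raw offset.  A sketch for the args-grow-upward, constant-offset
   case, with made-up numbers: */
#if 0
static long
pad_offset_to_boundary (long offset, long sp_offset, long boundary_bytes)
{
  long misalign = (offset + sp_offset) % boundary_bytes;
  if (misalign != 0)
    offset += boundary_bytes - misalign;   /* the alignment_pad amount */
  return offset;
}

/* With sp_offset 8 and a 16-byte boundary, offset 4 is padded to 8:
   8 + 8 = 16 is aligned, whereas rounding the raw offset alone (to 16)
   would leave the effective address 16 + 8 = 24 misaligned.  */
#endif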
4231
4232static void
4233pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4234{
4235 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4236 int misalign;
4237 if (passed_mode != BLKmode
4238      && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4239 offset_ptr->constant += -misalign & (align - 1);
4240 else
4241 {
4242 if (TREE_CODE (sizetree) != INTEGER_CST
4243 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4244 {
4245 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4246 tree s2 = round_up (sizetree, align);
4247 /* Add it in. */
4248 ADD_PARM_SIZE (*offset_ptr, s2);
4249 SUB_PARM_SIZE (*offset_ptr, sizetree);
4250 }
4251 }
4252}
4253
4254
4255/* True if register REGNO was alive at a place where `setjmp' was
4256 called and was set more than once or is an argument. Such regs may
4257 be clobbered by `longjmp'. */
4258
4259static bool
4260regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4261{
4262 /* There appear to be cases where some local vars never reach the
4263 backend but have bogus regnos. */
4264 if (regno >= max_reg_num ())
4265 return false;
4266
4267 return ((REG_N_SETS (regno) > 1
4268 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4269 regno))
4270 && REGNO_REG_SET_P (setjmp_crosses, regno));
4271}
4272
4273/* Walk the tree of blocks describing the binding levels within a
4274   function and warn about variables that might be clobbered by setjmp
4275   or vfork.  This is done after flow analysis and before register
4276   allocation, since register allocation will replace the pseudo-regs
4277   with hard regs.  */
4278
4279static void
4280setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4281{
4282 tree decl, sub;
4283
4284 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4285 {
4286 if (VAR_P (decl)
4287 && DECL_RTL_SET_P (decl)
4288 && REG_P (DECL_RTL (decl))
4289 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4290 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4291 " %<longjmp%> or %<vfork%>", decl);
4292 }
4293
4294 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4295    setjmp_vars_warning (setjmp_crosses, sub);
4296}
4297
4298/* Do the appropriate part of setjmp_vars_warning
4299 but for arguments instead of local variables. */
4300
4301static void
4302setjmp_args_warning (bitmap setjmp_crosses)
4303{
4304 tree decl;
4305 for (decl = DECL_ARGUMENTS (current_function_decl);
4306 decl; decl = DECL_CHAIN (decl))
4307 if (DECL_RTL (decl) != 0
4308 && REG_P (DECL_RTL (decl))
4309 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4310 warning (OPT_Wclobbered,
4311 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4312 decl);
4313}
4314
4315/* Generate warning messages for variables live across setjmp. */
4316
4317void
4318generate_setjmp_warnings (void)
4319{
4320 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4321
4322 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4323      || bitmap_empty_p (setjmp_crosses))
4324 return;
4325
4326 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4327 setjmp_args_warning (setjmp_crosses);
4328}
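
/* Illustration only, added for exposition (not from the GCC sources):
   the kind of user code the warnings above are about.  With
   optimization, I may live in a call-clobbered register across the
   setjmp call, and the store "i = 1" can be lost when longjmp unwinds,
   which is what -Wclobbered reports.  may_longjmp is a hypothetical
   function that might call longjmp.  */
#if 0
#include <setjmp.h>

extern void may_longjmp (jmp_buf);

int
example (jmp_buf env)
{
  int i = 0;		/* "might be clobbered by longjmp or vfork" */
  if (setjmp (env) == 0)
    {
      i = 1;
      may_longjmp (env);
    }
  return i;		/* indeterminate if longjmp was taken */
}
#endif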
4329
4330
4331/* Reverse the order of elements in the fragment chain T of blocks,
4332 and return the new head of the chain (old last element).
4333   In addition, clear BLOCK_SAME_RANGE flags when needed
4334 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4335 its super fragment origin. */
4336
4337static tree
4338block_fragments_nreverse (tree t)
4339{
4340 tree prev = 0, block, next, prev_super = 0;
4341 tree super = BLOCK_SUPERCONTEXT (t);
4342 if (BLOCK_FRAGMENT_ORIGIN (super))
4343 super = BLOCK_FRAGMENT_ORIGIN (super);
4344 for (block = t; block; block = next)
4345 {
4346 next = BLOCK_FRAGMENT_CHAIN (block);
4347 BLOCK_FRAGMENT_CHAIN (block) = prev;
4348 if ((prev && !BLOCK_SAME_RANGE (prev))
4349 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4350 != prev_super))
4351 BLOCK_SAME_RANGE (block) = 0;
4352 prev_super = BLOCK_SUPERCONTEXT (block);
4353 BLOCK_SUPERCONTEXT (block) = super;
4354 prev = block;
4355 }
4356 t = BLOCK_FRAGMENT_ORIGIN (t);
4357 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4358 != prev_super)
4359 BLOCK_SAME_RANGE (t) = 0;
4360 BLOCK_SUPERCONTEXT (t) = super;
4361 return prev;
4362}
4363
4364/* Reverse the order of elements in the chain T of blocks,
4365 and return the new head of the chain (old last element).
4366 Also do the same on subblocks and reverse the order of elements
4367 in BLOCK_FRAGMENT_CHAIN as well. */
4368
4369static tree
4370blocks_nreverse_all (tree t)
4371{
4372 tree prev = 0, block, next;
4373 for (block = t; block; block = next)
4374 {
4375 next = BLOCK_CHAIN (block);
4376 BLOCK_CHAIN (block) = prev;
4377 if (BLOCK_FRAGMENT_CHAIN (block)
4378 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4379 {
4380 BLOCK_FRAGMENT_CHAIN (block)
4381 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4382 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4383 BLOCK_SAME_RANGE (block) = 0;
4384 }
4385 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4386 prev = block;
4387 }
4388 return prev;
4389}
4390
4391
4392/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4393 and create duplicate blocks. */
4394/* ??? Need an option to either create block fragments or to create
4395 abstract origin duplicates of a source block. It really depends
4396 on what optimization has been performed. */
4397
4398void
4399reorder_blocks (void)
4400{
4401 tree block = DECL_INITIAL (current_function_decl);
4402
4403 if (block == NULL_TREE)
4404 return;
4405
4406 auto_vec<tree, 10> block_stack;
4407
4408 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4409 clear_block_marks (block);
4410
4411 /* Prune the old trees away, so that they don't get in the way. */
4412 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4413 BLOCK_CHAIN (block) = NULL_TREE;
4414
4415 /* Recreate the block tree from the note nesting. */
4416 reorder_blocks_1 (get_insns (), block, &block_stack);
4417 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4418}
4419
4420/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4421
4422void
4423clear_block_marks (tree block)
4424{
4425 while (block)
4426 {
4427 TREE_ASM_WRITTEN (block) = 0;
4428 clear_block_marks (BLOCK_SUBBLOCKS (block));
4429 block = BLOCK_CHAIN (block);
4430 }
4431}
4432
4433static void
4434reorder_blocks_1 (rtx_insn *insns, tree current_block,
4435 vec<tree> *p_block_stack)
4436{
4437 rtx_insn *insn;
4438 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4439
4440 for (insn = insns; insn; insn = NEXT_INSN (insn))
4441 {
4442 if (NOTE_P (insn))
4443 {
4444 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4445 {
4446 tree block = NOTE_BLOCK (insn);
4447 tree origin;
4448
4449 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4450 origin = block;
4451
4452 if (prev_end)
4453 BLOCK_SAME_RANGE (prev_end) = 0;
4454 prev_end = NULL_TREE;
4455
4456 /* If we have seen this block before, that means it now
4457 spans multiple address regions. Create a new fragment. */
4458 if (TREE_ASM_WRITTEN (block))
4459 {
4460 tree new_block = copy_node (block);
4461
4462 BLOCK_SAME_RANGE (new_block) = 0;
4463 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4464 BLOCK_FRAGMENT_CHAIN (new_block)
4465 = BLOCK_FRAGMENT_CHAIN (origin);
4466 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4467
4468 NOTE_BLOCK (insn) = new_block;
4469 block = new_block;
4470 }
4471
4472 if (prev_beg == current_block && prev_beg)
4473 BLOCK_SAME_RANGE (block) = 1;
4474
4475 prev_beg = origin;
4476
4477 BLOCK_SUBBLOCKS (block) = 0;
4478 TREE_ASM_WRITTEN (block) = 1;
4479 /* When there's only one block for the entire function,
4480	       current_block == block and we mustn't do this; it
4481	       would cause infinite recursion.  */
4482 if (block != current_block)
4483 {
4484 tree super;
4485 if (block != origin)
4486 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4487 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4488 (origin))
4489 == current_block);
4490 if (p_block_stack->is_empty ())
4491 super = current_block;
4492 else
4493 {
4494 super = p_block_stack->last ();
4495 gcc_assert (super == current_block
4496 || BLOCK_FRAGMENT_ORIGIN (super)
4497 == current_block);
4498 }
4499 BLOCK_SUPERCONTEXT (block) = super;
4500 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4501 BLOCK_SUBBLOCKS (current_block) = block;
4502 current_block = origin;
4503 }
4504	  p_block_stack->safe_push (block);
4505 }
4506 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4507 {
4508 NOTE_BLOCK (insn) = p_block_stack->pop ();
4509 current_block = BLOCK_SUPERCONTEXT (current_block);
4510 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4511 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4512 prev_beg = NULL_TREE;
4513 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4514 ? NOTE_BLOCK (insn) : NULL_TREE;
4515 }
4516 }
4517 else
4518 {
4519 prev_beg = NULL_TREE;
4520 if (prev_end)
4521 BLOCK_SAME_RANGE (prev_end) = 0;
4522 prev_end = NULL_TREE;
4523 }
4524 }
4525}
4526
4527/* Reverse the order of elements in the chain T of blocks,
4528 and return the new head of the chain (old last element). */
4529
4530tree
4531blocks_nreverse (tree t)
4532{
4533 tree prev = 0, block, next;
4534 for (block = t; block; block = next)
4535 {
4536 next = BLOCK_CHAIN (block);
4537 BLOCK_CHAIN (block) = prev;
4538 prev = block;
4539 }
4540 return prev;
4541}
4542
4543/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4544 by modifying the last node in chain 1 to point to chain 2. */
4545
4546tree
4547block_chainon (tree op1, tree op2)
4548{
4549 tree t1;
4550
4551 if (!op1)
4552 return op2;
4553 if (!op2)
4554 return op1;
4555
4556 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4557 continue;
4558 BLOCK_CHAIN (t1) = op2;
4559
4560#ifdef ENABLE_TREE_CHECKING
4561 {
4562 tree t2;
4563 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4564 gcc_assert (t2 != t1);
4565 }
4566#endif
4567
4568 return op1;
4569}
4570
4571/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4572 non-NULL, list them all into VECTOR, in a depth-first preorder
4573 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4574 blocks. */
4575
4576static int
4577all_blocks (tree block, tree *vector)
4578{
4579 int n_blocks = 0;
4580
4581 while (block)
4582 {
4583 TREE_ASM_WRITTEN (block) = 0;
4584
4585 /* Record this block. */
4586 if (vector)
4587 vector[n_blocks] = block;
4588
4589 ++n_blocks;
4590
4591 /* Record the subblocks, and their subblocks... */
4592 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4593			    vector ? vector + n_blocks : 0);
4594 block = BLOCK_CHAIN (block);
4595 }
4596
4597 return n_blocks;
4598}
4599
4600/* Return a vector containing all the blocks rooted at BLOCK. The
4601 number of elements in the vector is stored in N_BLOCKS_P. The
4602 vector is dynamically allocated; it is the caller's responsibility
4603 to call `free' on the pointer returned. */
4604
4605static tree *
4606get_block_vector (tree block, int *n_blocks_p)
4607{
4608 tree *block_vector;
4609
4610 *n_blocks_p = all_blocks (block, NULL);
4611 block_vector = XNEWVEC (tree, *n_blocks_p);
4612  all_blocks (block, block_vector);
4613
4614 return block_vector;
4615}
4616
4617static GTY(()) int next_block_index = 2;
4618
4619/* Set BLOCK_NUMBER for all the blocks in FN. */
4620
4621void
4622number_blocks (tree fn)
4623{
4624 int i;
4625 int n_blocks;
4626 tree *block_vector;
4627
4628  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4629
4630 /* The top-level BLOCK isn't numbered at all. */
4631 for (i = 1; i < n_blocks; ++i)
4632 /* We number the blocks from two. */
4633 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4634
4635  free (block_vector);
4636
4637 return;
4638}
4639
4640/* If VAR is present in a subblock of BLOCK, return the subblock. */
4641
4642DEBUG_FUNCTION tree
4643debug_find_var_in_block_tree (tree var, tree block)
4644{
4645 tree t;
4646
4647 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4648 if (t == var)
4649 return block;
4650
4651 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4652 {
4653      tree ret = debug_find_var_in_block_tree (var, t);
4654 if (ret)
4655 return ret;
4656 }
4657
4658 return NULL_TREE;
4659}
4660
4661/* Keep track of whether we're in a dummy function context. If we are,
4662 we don't want to invoke the set_current_function hook, because we'll
4663 get into trouble if the hook calls target_reinit () recursively or
4664 when the initial initialization is not yet complete. */
4665
4666static bool in_dummy_function;
4667
4668/* Invoke the target hook when setting cfun. Update the optimization options
4669 if the function uses different options than the default. */
4670
4671static void
4672invoke_set_current_function_hook (tree fndecl)
4673{
4674 if (!in_dummy_function)
4675 {
4676 tree opts = ((fndecl)
4677 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4678 : optimization_default_node);
4679
4680 if (!opts)
4681 opts = optimization_default_node;
4682
4683 /* Change optimization options if needed. */
4684 if (optimization_current_node != opts)
4685 {
4686 optimization_current_node = opts;
4687 cl_optimization_restore (&global_options, &global_options_set,
4688 TREE_OPTIMIZATION (opts));
4689 }
4690
4691 targetm.set_current_function (fndecl);
4692 this_fn_optabs = this_target_optabs;
4693
4694      /* Initialize global alignment variables after the options have been processed.  */
4695 parse_alignment_opts ();
4696
4697 if (opts != optimization_default_node)
4698 {
4699 init_tree_optimization_optabs (opts);
4700 if (TREE_OPTIMIZATION_OPTABS (opts))
4701 this_fn_optabs = (struct target_optabs *)
4702 TREE_OPTIMIZATION_OPTABS (opts);
4703 }
4704 }
4705}
4706
4707/* cfun should never be set directly; use this function. */
4708
4709void
4710set_cfun (struct function *new_cfun, bool force)
4711{
4712 if (cfun != new_cfun || force)
4713 {
4714 cfun = new_cfun;
4715      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4716 redirect_edge_var_map_empty ();
4717 }
4718}
4719
4720/* Initialized with NOGC, making this poisonous to the garbage collector. */
4721
4722static vec<function *> cfun_stack;
4723
4724/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4725 current_function_decl accordingly. */
4726
4727void
4728push_cfun (struct function *new_cfun)
4729{
4730 gcc_assert ((!cfun && !current_function_decl)
4731 || (cfun && current_function_decl == cfun->decl));
4732  cfun_stack.safe_push (cfun);
4733 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4734 set_cfun (new_cfun);
4735}
4736
4737/* Pop cfun from the stack. Also set current_function_decl accordingly. */
4738
4739void
4740pop_cfun (void)
4741{
4742 struct function *new_cfun = cfun_stack.pop ();
4743 /* When in_dummy_function, we do have a cfun but current_function_decl is
4744 NULL. We also allow pushing NULL cfun and subsequently changing
4745 current_function_decl to something else and have both restored by
4746 pop_cfun. */
4747 gcc_checking_assert (in_dummy_function
4748 || !cfun
4749 || current_function_decl == cfun->decl);
4750 set_cfun (new_cfun);
4751 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4752}
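
/* Illustration only, added for exposition (not from the GCC sources):
   the usual pattern for temporarily working in another function's
   context is to bracket the work with push_cfun/pop_cfun, for example
   while walking all defined functions.  process_body is a hypothetical
   helper; FOR_EACH_DEFINED_FUNCTION and DECL_STRUCT_FUNCTION are the
   real macros from cgraph.h and tree.h.  */
#if 0
static void
process_all_function_bodies (void)
{
  cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      /* cfun and current_function_decl now refer to NODE's function.  */
      process_body (cfun);
      pop_cfun ();
    }
}
#endif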
4753
4754/* Return the current value of funcdef_no and increment it.  */
4755int
4756get_next_funcdef_no (void)
4757{
4758 return funcdef_no++;
4759}
4760
4761/* Return the current value of funcdef_no.  */
4762int
4763get_last_funcdef_no (void)
4764{
4765 return funcdef_no;
4766}
4767
4768/* Allocate and initialize the stack usage info data structure for the
4769 current function. */
4770static void
4771allocate_stack_usage_info (void)
4772{
4773 gcc_assert (!cfun->su);
4774 cfun->su = ggc_cleared_alloc<stack_usage> ();
4775 cfun->su->static_stack_size = -1;
4776}
4777
4778/* Allocate a function structure for FNDECL and set its contents
4779 to the defaults. Set cfun to the newly-allocated object.
4780 Some of the helper functions invoked during initialization assume
4781 that cfun has already been set. Therefore, assign the new object
4782 directly into cfun and invoke the back end hook explicitly at the
4783 very end, rather than initializing a temporary and calling set_cfun
4784 on it.
4785
4786 ABSTRACT_P is true if this is a function that will never be seen by
4787 the middle-end. Such functions are front-end concepts (like C++
4788 function templates) that do not correspond directly to functions
4789 placed in object files. */
4790
4791void
4792allocate_struct_function (tree fndecl, bool abstract_p)
4793{
4794 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4795
4796 cfun = ggc_cleared_alloc<function> ();
4797
4798 init_eh_for_function ();
4799
4800 if (init_machine_status)
4801 cfun->machine = (*init_machine_status) ();
4802
4803#ifdef OVERRIDE_ABI_FORMAT
4804 OVERRIDE_ABI_FORMAT (fndecl);
4805#endif
4806
4807 if (fndecl != NULL_TREE)
4808 {
4809 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4810 cfun->decl = fndecl;
4811 current_function_funcdef_no = get_next_funcdef_no ();
4812 }
4813
4814 invoke_set_current_function_hook (fndecl);
4815
4816 if (fndecl != NULL_TREE)
4817 {
4818 tree result = DECL_RESULT (fndecl);
4819
4820 if (!abstract_p)
4821 {
4822 /* Now that we have activated any function-specific attributes
4823 that might affect layout, particularly vector modes, relayout
4824 each of the parameters and the result. */
4825 relayout_decl (result);
4826 for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4827 parm = DECL_CHAIN (parm))
4828 relayout_decl (parm);
4829
4830 /* Similarly relayout the function decl. */
4831 targetm.target_option.relayout_function (fndecl);
4832 }
4833
4834      if (!abstract_p && aggregate_value_p (result, fndecl))
4835 {
4836#ifdef PCC_STATIC_STRUCT_RETURN
4837 cfun->returns_pcc_struct = 1;
4838#endif
4839 cfun->returns_struct = 1;
4840 }
4841
4842 cfun->stdarg = stdarg_p (fntype);
4843
4844 /* Assume all registers in stdarg functions need to be saved. */
4845 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4846 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4847
4848 /* ??? This could be set on a per-function basis by the front-end
4849 but is this worth the hassle? */
4850 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4851 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4852
4853 if (!profile_flag && !flag_instrument_function_entry_exit)
4854 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4855
4856 if (flag_callgraph_info)
4857 allocate_stack_usage_info ();
4858 }
4859
4860 /* Don't enable begin stmt markers if var-tracking at assignments is
4861 disabled. The markers make little sense without the variable
4862 binding annotations among them. */
4863 cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4864 && MAY_HAVE_DEBUG_MARKER_STMTS;
4865}
4866
4867/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4868 instead of just setting it. */
4869
4870void
4871push_struct_function (tree fndecl, bool abstract_p)
4872{
4873 /* When in_dummy_function we might be in the middle of a pop_cfun and
4874 current_function_decl and cfun may not match. */
4875 gcc_assert (in_dummy_function
4876 || (!cfun && !current_function_decl)
4877 || (cfun && current_function_decl == cfun->decl));
4878  cfun_stack.safe_push (cfun);
4879 current_function_decl = fndecl;
4880 allocate_struct_function (fndecl, abstract_p);
4881}
4882
4883/* Reset crtl and other non-struct-function variables to defaults as
4884 appropriate for emitting rtl at the start of a function. */
4885
4886static void
4887prepare_function_start (void)
4888{
4889 gcc_assert (!get_last_insn ());
4890
4891 if (in_dummy_function)
4892 crtl->abi = &default_function_abi;
4893 else
4894 crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
4895
4896 init_temp_slots ();
4897 init_emit ();
4898 init_varasm_status ();
4899 init_expr ();
4900 default_rtl_profile ();
4901
4902 if (flag_stack_usage_info && !flag_callgraph_info)
4903 allocate_stack_usage_info ();
4904
4905 cse_not_expected = ! optimize;
4906
4907 /* Caller save not needed yet. */
4908 caller_save_needed = 0;
4909
4910 /* We haven't done register allocation yet. */
4911 reg_renumber = 0;
4912
4913 /* Indicate that we have not instantiated virtual registers yet. */
4914 virtuals_instantiated = 0;
4915
4916 /* Indicate that we want CONCATs now. */
4917 generating_concat_p = 1;
4918
4919 /* Indicate we have no need of a frame pointer yet. */
4920 frame_pointer_needed = 0;
4921}
4922
4923void
4924push_dummy_function (bool with_decl)
4925{
4926 tree fn_decl, fn_type, fn_result_decl;
4927
4928 gcc_assert (!in_dummy_function);
4929 in_dummy_function = true;
4930
4931 if (with_decl)
4932 {
4933 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4934 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4935 fn_type);
4936 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4937 NULL_TREE, void_type_node);
4938 DECL_RESULT (fn_decl) = fn_result_decl;
4939 DECL_ARTIFICIAL (fn_decl) = 1;
4940 tree fn_name = get_identifier (" ");
4941 SET_DECL_ASSEMBLER_NAME (fn_decl, fn_name);
4942 }
4943 else
4944 fn_decl = NULL_TREE;
4945
4946  push_struct_function (fn_decl);
4947}
4948
4949/* Initialize the rtl expansion mechanism so that we can do simple things
4950 like generate sequences. This is used to provide a context during global
4951 initialization of some passes. You must call expand_dummy_function_end
4952 to exit this context. */
4953
4954void
4955init_dummy_function_start (void)
4956{
4957  push_dummy_function (false);
4958 prepare_function_start ();
4959}
4960
4961/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4962 and initialize static variables for generating RTL for the statements
4963 of the function. */
4964
4965void
4966init_function_start (tree subr)
4967{
4968 /* Initialize backend, if needed. */
4969 initialize_rtl ();
4970
4971 prepare_function_start ();
4972 decide_function_section (subr);
4973
4974  /* Warn if the return value is an aggregate type,
4975 regardless of which calling convention we are using for it. */
4976 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4977 warning_at (DECL_SOURCE_LOCATION (DECL_RESULT (subr)),
4978 OPT_Waggregate_return, "function returns an aggregate");
4979}
4980
4981/* Expand code to verify the stack_protect_guard. This is invoked at
4982 the end of a function to be protected. */
4983
4984void
4985stack_protect_epilogue (void)
4986{
4987 tree guard_decl = crtl->stack_protect_guard_decl;
4988 rtx_code_label *label = gen_label_rtx ();
4989 rtx x, y;
4990 rtx_insn *seq = NULL;
4991
4992 x = expand_normal (crtl->stack_protect_guard);
4993
4994 if (targetm.have_stack_protect_combined_test () && guard_decl)
4995 {
4996 gcc_assert (DECL_P (guard_decl));
4997 y = DECL_RTL (guard_decl);
4998 /* Allow the target to compute address of Y and compare it with X without
4999 leaking Y into a register. This combined address + compare pattern
5000 allows the target to prevent spilling of any intermediate results by
5001 splitting it after register allocator. */
5002 seq = targetm.gen_stack_protect_combined_test (x, y, label);
5003 }
5004 else
5005 {
5006 if (guard_decl)
5007	y = expand_normal (guard_decl);
5008 else
5009 y = const0_rtx;
5010
5011 /* Allow the target to compare Y with X without leaking either into
5012 a register. */
5013 if (targetm.have_stack_protect_test ())
5014 seq = targetm.gen_stack_protect_test (x, y, label);
5015 }
5016
5017 if (seq)
5018 emit_insn (seq);
5019 else
5020 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5021
5022 /* The noreturn predictor has been moved to the tree level. The rtl-level
5023 predictors estimate this branch about 20%, which isn't enough to get
5024 things moved out of line. Since this is the only extant case of adding
5025     a noreturn function at the rtl level, it doesn't seem worth doing anything
5026 except adding the prediction by hand. */
5027 rtx_insn *tmp = get_last_insn ();
5028 if (JUMP_P (tmp))
5029 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5030
5031 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5032 free_temp_slots ();
5033 emit_label (label);
5034}
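
/* Illustration only, added for exposition (not from the GCC sources):
   ignoring the target hooks, the check emitted above is roughly

     if (frame_canary_slot != guard_value)    -- X and Y above
       __stack_chk_fail ();                   -- default stack_protect_fail
     normal_return: ...                       -- LABEL above

   where frame_canary_slot is the copy the prologue stored in the frame
   and guard_value is typically the global __stack_chk_guard or a TLS
   slot (both names here are invented placeholders).  The combined-test
   hook only exists so the comparison can be emitted without the guard
   value ever landing in a spillable register.  */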
5035
5036/* Start the RTL for a new function, and set variables used for
5037 emitting RTL.
5038   SUBR is the FUNCTION_DECL node.  */
5041
5042bool currently_expanding_function_start;
5043void
5044expand_function_start (tree subr)
5045{
5046 currently_expanding_function_start = true;
5047
5048 /* Make sure volatile mem refs aren't considered
5049 valid operands of arithmetic insns. */
5050 init_recog_no_volatile ();
5051
5052 crtl->profile
5053 = (profile_flag
5054 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5055
5056 crtl->limit_stack
5057 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5058
5059 /* Make the label for return statements to jump to. Do not special
5060 case machines with special return instructions -- they will be
5061 handled later during jump, ifcvt, or epilogue creation. */
5062 return_label = gen_label_rtx ();
5063
5064 /* Initialize rtx used to return the value. */
5065 /* Do this before assign_parms so that we copy the struct value address
5066 before any library calls that assign parms might generate. */
5067
5068 /* Decide whether to return the value in memory or in a register. */
5069 tree res = DECL_RESULT (subr);
5070  if (aggregate_value_p (res, subr))
5071 {
5072 /* Returning something that won't go in a register. */
5073 rtx value_address = 0;
5074
5075#ifdef PCC_STATIC_STRUCT_RETURN
5076 if (cfun->returns_pcc_struct)
5077 {
5078 int size = int_size_in_bytes (TREE_TYPE (res));
5079 value_address = assemble_static_space (size);
5080 }
5081 else
5082#endif
5083 {
5084 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5085 /* Expect to be passed the address of a place to store the value.
5086 If it is passed as an argument, assign_parms will take care of
5087 it. */
5088 if (sv)
5089 {
5090 value_address = gen_reg_rtx (Pmode);
5091 emit_move_insn (value_address, sv);
5092 }
5093 }
5094 if (value_address)
5095 {
5096 rtx x = value_address;
5097 if (!DECL_BY_REFERENCE (res))
5098 {
5099 x = gen_rtx_MEM (DECL_MODE (res), x);
5100 set_mem_attributes (x, res, 1);
5101 }
5102 set_parm_rtl (res, x);
5103 }
5104 }
5105 else if (DECL_MODE (res) == VOIDmode)
5106 /* If return mode is void, this decl rtl should not be used. */
5107 set_parm_rtl (res, NULL_RTX);
5108 else
5109 {
5110 /* Compute the return values into a pseudo reg, which we will copy
5111 into the true return register after the cleanups are done. */
5112 tree return_type = TREE_TYPE (res);
5113
5114 /* If we may coalesce this result, make sure it has the expected mode
5115 in case it was promoted. But we need not bother about BLKmode. */
5116 machine_mode promoted_mode
5117 = flag_tree_coalesce_vars && is_gimple_reg (res)
5118 ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5119 : BLKmode;
5120
5121 if (promoted_mode != BLKmode)
5122 set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5123 else if (TYPE_MODE (return_type) != BLKmode
5124 && targetm.calls.return_in_msb (return_type))
5125 /* expand_function_end will insert the appropriate padding in
5126 this case. Use the return value's natural (unpadded) mode
5127 within the function proper. */
5128 set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5129 else
5130 {
5131 /* In order to figure out what mode to use for the pseudo, we
5132 figure out what the mode of the eventual return register will
5133 actually be, and use that. */
5134 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5135
5136 /* Structures that are returned in registers are not
5137 aggregate_value_p, so we may see a PARALLEL or a REG. */
5138 if (REG_P (hard_reg))
5139 set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5140 else
5141 {
5142 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5143 set_parm_rtl (res, gen_group_rtx (hard_reg));
5144 }
5145 }
5146
5147 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5148 result to the real return register(s). */
5149 DECL_REGISTER (res) = 1;
5150 }
5151
5152 /* Initialize rtx for parameters and local variables.
5153 In some cases this requires emitting insns. */
5154  assign_parms (subr);
5155
5156 /* If function gets a static chain arg, store it. */
5157 if (cfun->static_chain_decl)
5158 {
5159 tree parm = cfun->static_chain_decl;
5160 rtx local, chain;
5161 rtx_insn *insn;
5162 int unsignedp;
5163
5164 local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5165 chain = targetm.calls.static_chain (current_function_decl, true);
5166
5167 set_decl_incoming_rtl (parm, chain, false);
5168 set_parm_rtl (parm, local);
5169 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5170
5171 if (GET_MODE (local) != GET_MODE (chain))
5172 {
5173 convert_move (local, chain, unsignedp);
5174 insn = get_last_insn ();
5175 }
5176 else
5177 insn = emit_move_insn (local, chain);
5178
5179 /* Mark the register as eliminable, similar to parameters. */
5180 if (MEM_P (chain)
5181 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5182 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5183
5184 /* If we aren't optimizing, save the static chain onto the stack. */
5185 if (!optimize)
5186 {
5187 tree saved_static_chain_decl
5188 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5189 DECL_NAME (parm), TREE_TYPE (parm));
5190 rtx saved_static_chain_rtx
5191	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5192 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5193 emit_move_insn (saved_static_chain_rtx, chain);
5194 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5195 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5196 }
5197 }
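  /* Illustrative sketch (not compiled; OUTER and INNER are made-up names):
     a static chain typically arises for a GNU C nested function that refers
     to its enclosing frame, e.g.

	 int
	 outer (int x)
	 {
	   int inner (void) { return x + 1; }
	   return inner ();
	 }

     INNER receives the address of OUTER's frame in the static chain
     register; the code above copies that incoming register into a pseudo
     and, at -O0, also spills it to a stack slot so debuggers can find it.  */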
5198
5199 /* The following was moved from init_function_start.
5200 The move was supposed to make sdb output more accurate. */
5201 /* Indicate the beginning of the function body,
5202 as opposed to parm setup. */
5203 emit_note (NOTE_INSN_FUNCTION_BEG);
5204
5205 gcc_assert (NOTE_P (get_last_insn ()));
5206
5207 function_beg_insn = parm_birth_insn = get_last_insn ();
5208
5209 /* If the function receives a non-local goto, then store the
5210 bits we need to restore the frame pointer. */
5211 if (cfun->nonlocal_goto_save_area)
5212 {
5213 tree t_save;
5214 rtx r_save;
5215
5216 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5217 gcc_assert (DECL_RTL_SET_P (var));
5218
5219 t_save = build4 (ARRAY_REF,
5220 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5221 cfun->nonlocal_goto_save_area,
5222 integer_zero_node, NULL_TREE, NULL_TREE);
5223 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5224 gcc_assert (GET_MODE (r_save) == Pmode);
5225
5226 emit_move_insn (r_save, hard_frame_pointer_rtx);
5227 update_nonlocal_goto_save_area ();
5228 }
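  /* Illustrative sketch (made-up names, not compiled): the save area above
     supports a nonlocal goto out of a nested function, e.g.

	 int
	 parse (void)
	 {
	   __label__ failed;
	   void bail (void) { goto failed; }   // nonlocal jump
	   helper (bail);
	   return 0;
	 failed:
	   return -1;
	 }

     Jumping to FAILED from inside BAIL must restore PARSE's frame and stack
     pointers, which is why they are stored into the save area here.  */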
5229
5230 if (crtl->profile)
5231 {
5232#ifdef PROFILE_HOOK
5233 PROFILE_HOOK (current_function_funcdef_no);
5234#endif
5235 }
5236
5237 /* If we are doing generic stack checking, the probe should go here. */
5238 if (flag_stack_check == GENERIC_STACK_CHECK)
5239 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5240
5241 currently_expanding_function_start = false;
5242}
5243
5244void
5245pop_dummy_function (void)
5246{
5247 pop_cfun ();
5248 in_dummy_function = false;
5249}
5250
5251/* Undo the effects of init_dummy_function_start. */
5252void
5253expand_dummy_function_end (void)
5254{
5255 gcc_assert (in_dummy_function);
5256
5257 /* End any sequences that failed to be closed due to syntax errors. */
5258 while (in_sequence_p ())
5259 end_sequence ();
5260
5261 /* Outside function body, can't compute type's actual size
5262 until next function's body starts. */
5263
5264 free_after_parsing (cfun);
5265 free_after_compilation (cfun);
5266 pop_dummy_function ();
5267}
5268
5269/* Helper for diddle_return_value. */
5270
5271void
5272diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5273{
5274 if (! outgoing)
5275 return;
5276
5277 if (REG_P (outgoing))
5278 (*doit) (outgoing, arg);
5279 else if (GET_CODE (outgoing) == PARALLEL)
5280 {
5281 int i;
5282
5283 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5284 {
5285 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5286
5287 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5288 (*doit) (x, arg);
5289 }
5290 }
5291}
5292
5293/* Call DOIT for each hard register used as a return value from
5294 the current function. */
5295
5296void
5297diddle_return_value (void (*doit) (rtx, void *), void *arg)
5298{
5299 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5300}
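/* A minimal usage sketch (NOTE_REG and the counter are hypothetical, not
   part of GCC): the callback is applied once per hard register holding the
   return value, so one could count those registers with

       static void
       note_reg (rtx reg ATTRIBUTE_UNUSED, void *data)
       {
	 ++*(int *) data;
       }

       int n = 0;
       diddle_return_value (note_reg, &n);

   The clobber and use helpers below follow exactly this pattern.  */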
5301
5302static void
5303do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5304{
5305 emit_clobber (reg);
5306}
5307
5308void
5309clobber_return_register (void)
5310{
5311 diddle_return_value (do_clobber_return_reg, NULL);
5312
5313 /* In case we use a pseudo to return the value, clobber it too. */
5314 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5315 {
5316 tree decl_result = DECL_RESULT (current_function_decl);
5317 rtx decl_rtl = DECL_RTL (decl_result);
5318 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5319 {
5320 do_clobber_return_reg (decl_rtl, NULL);
5321 }
5322 }
5323}
5324
5325static void
5326do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5327{
5328 emit_use (reg);
5329}
5330
5331static void
5332use_return_register (void)
5333{
5334 diddle_return_value (do_use_return_reg, NULL);
5335}
5336
5337/* Generate RTL for the end of the current function. */
5338
5339void
5340expand_function_end (void)
5341{
5342 /* If arg_pointer_save_area was referenced only from a nested
5343 function, we will not have initialized it yet. Do that now. */
5344 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5345 get_arg_pointer_save_area ();
5346
5347 /* If we are doing generic stack checking and this function makes calls,
5348 do a stack probe at the start of the function to ensure we have enough
5349 space for another stack frame. */
5350 if (flag_stack_check == GENERIC_STACK_CHECK)
5351 {
5352 rtx_insn *insn, *seq;
5353
5354 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5355 if (CALL_P (insn))
5356 {
5357 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5358 start_sequence ();
5359 if (STACK_CHECK_MOVING_SP)
5360 anti_adjust_stack_and_probe (max_frame_size, true);
5361 else
5362 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5363 seq = get_insns ();
5364 end_sequence ();
5365 set_insn_locations (seq, prologue_location);
5366 emit_insn_before (seq, stack_check_probe_note);
5367 break;
5368 }
5369 }
5370
5371 /* End any sequences that failed to be closed due to syntax errors. */
5372 while (in_sequence_p ())
5373 end_sequence ();
5374
5375 clear_pending_stack_adjust ();
5376 do_pending_stack_adjust ();
5377
5378 /* Output a linenumber for the end of the function.
5379 SDB depended on this. */
5380 set_curr_insn_location (input_location);
5381
5382 /* Before the return label (if any), clobber the return
5383 registers so that they are not propagated live to the rest of
5384 the function. This can only happen with functions that drop
5385 through; if there had been a return statement, there would
5386 have either been a return rtx, or a jump to the return label.
5387
5388 We delay actual code generation after the current_function_value_rtx
5389 is computed. */
5390 rtx_insn *clobber_after = get_last_insn ();
5391
5392 /* Output the label for the actual return from the function. */
5393 emit_label (return_label);
5394
5395 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5396 {
5397 /* Let except.cc know where it should emit the call to unregister
5398 the function context for sjlj exceptions. */
5399 if (flag_exceptions)
5400 sjlj_emit_function_exit_after (get_last_insn ());
5401 }
5402
5403 /* If this is an implementation of throw, do what's necessary to
5404 communicate between __builtin_eh_return and the epilogue. */
5405 expand_eh_return ();
5406
5407 /* If stack protection is enabled for this function, check the guard. */
5408 if (crtl->stack_protect_guard
5409 && targetm.stack_protect_runtime_enabled_p ()
5410 && naked_return_label == NULL_RTX)
5411 stack_protect_epilogue ();
5412
5413 /* If scalar return value was computed in a pseudo-reg, or was a named
5414 return value that got dumped to the stack, copy that to the hard
5415 return register. */
5416 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5417 {
5418 tree decl_result = DECL_RESULT (current_function_decl);
5419 rtx decl_rtl = DECL_RTL (decl_result);
5420
5421 if ((REG_P (decl_rtl)
5422 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5423 : DECL_REGISTER (decl_result))
5424 /* Unless the psABI says not to. */
5425 && !TYPE_EMPTY_P (TREE_TYPE (decl_result)))
5426 {
5427 rtx real_decl_rtl = crtl->return_rtx;
5428 complex_mode cmode;
5429
5430 /* This should be set in assign_parms. */
5431 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5432
5433 /* If this is a BLKmode structure being returned in registers,
5434 then use the mode computed in expand_return. Note that if
5435 decl_rtl is memory, then its mode may have been changed,
5436 but that crtl->return_rtx has not. */
5437 if (GET_MODE (real_decl_rtl) == BLKmode)
5438 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5439
5440 /* If a non-BLKmode return value should be padded at the least
5441 significant end of the register, shift it left by the appropriate
5442 amount. BLKmode results are handled using the group load/store
5443 machinery. */
5444 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5445 && REG_P (real_decl_rtl)
5446 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5447 {
5448 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5449 REGNO (real_decl_rtl)),
5450 decl_rtl);
5451 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5452 }
5453 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5454 {
5455 /* If expand_function_start has created a PARALLEL for decl_rtl,
5456 move the result to the real return registers. Otherwise, do
5457 a group load from decl_rtl for a named return. */
5458 if (GET_CODE (decl_rtl) == PARALLEL)
5459 emit_group_move (real_decl_rtl, decl_rtl);
5460 else
5461 emit_group_load (real_decl_rtl, decl_rtl,
5462 TREE_TYPE (decl_result),
5463 int_size_in_bytes (TREE_TYPE (decl_result)));
5464 }
5465 /* In the case of complex integer modes smaller than a word, we'll
5466 need to generate some non-trivial bitfield insertions. Do that
5467 on a pseudo and not the hard register. */
5468 else if (GET_CODE (decl_rtl) == CONCAT
5469 && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5470 && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5471 {
5472 int old_generating_concat_p;
5473 rtx tmp;
5474
5475 old_generating_concat_p = generating_concat_p;
5476 generating_concat_p = 0;
5477 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5478 generating_concat_p = old_generating_concat_p;
5479
5480 emit_move_insn (tmp, decl_rtl);
5481 emit_move_insn (real_decl_rtl, tmp);
5482 }
5483 /* If a named return value was dumped to the stack, DECL_RTL may be a
5484 MEM whose mode differs from that of the return register; re-do the
5485 PROMOTE_MODE signed/unsigned extension. */
5486 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5487 {
5488 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5489 promote_function_mode (TREE_TYPE (decl_result),
5490 GET_MODE (decl_rtl), &unsignedp,
5491 TREE_TYPE (current_function_decl), 1);
5492
5493 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5494 }
5495 else
5496 emit_move_insn (real_decl_rtl, decl_rtl);
5497 }
5498 }
5499
5500 /* If returning a structure, arrange to return the address of the value
5501 in a place where debuggers expect to find it.
5502
5503 If returning a structure PCC style,
5504 the caller also depends on this value.
5505 And cfun->returns_pcc_struct is not necessarily set. */
5506 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5507 && !targetm.calls.omit_struct_return_reg)
5508 {
5509 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5510 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5511 rtx outgoing;
5512
5513 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5514 type = TREE_TYPE (type);
5515 else
5516 value_address = XEXP (value_address, 0);
5517
5518 outgoing = targetm.calls.function_value (build_pointer_type (type),
5519 current_function_decl, true);
5520
5521 /* Mark this as a function return value so integrate will delete the
5522 assignment and USE below when inlining this function. */
5523 REG_FUNCTION_VALUE_P (outgoing) = 1;
5524
5525 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5526 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5527 value_address = convert_memory_address (mode, value_address);
5528
5529 emit_move_insn (outgoing, value_address);
5530
5531 /* Show the return register used to hold the result (in this case the
5532 address of the result). */
5533 crtl->return_rtx = outgoing;
5534 }
5535
5536 /* Emit the actual code to clobber the return register. Don't emit
5537 it if CLOBBER_AFTER is a barrier; in that case the previous basic
5538 block certainly doesn't fall through into the exit block. */
5539 if (!BARRIER_P (clobber_after))
5540 {
5541 start_sequence ();
5542 clobber_return_register ();
5543 rtx_insn *seq = get_insns ();
5544 end_sequence ();
5545
5546 emit_insn_after (seq, clobber_after);
5547 }
5548
5549 /* Output the label for the naked return from the function. */
5550 if (naked_return_label)
5551 emit_label (naked_return_label);
5552
5553 /* @@@ This is a kludge. We want to ensure that instructions that
5554 may trap are not moved into the epilogue by scheduling, because
5555 we don't always emit unwind information for the epilogue. */
5556 if (cfun->can_throw_non_call_exceptions
5557 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5558 emit_insn (gen_blockage ());
5559
5560 /* If stack protection is enabled for this function, check the guard. */
5561 if (crtl->stack_protect_guard
5562 && targetm.stack_protect_runtime_enabled_p ()
5563 && naked_return_label)
5564 stack_protect_epilogue ();
5565
5566 /* If we had calls to alloca, and this machine needs
5567 an accurate stack pointer to exit the function,
5568 insert some code to save and restore the stack pointer. */
5569 if (! EXIT_IGNORE_STACK
5570 && cfun->calls_alloca)
5571 {
5572 rtx tem = 0;
5573
5574 start_sequence ();
5575 emit_stack_save (SAVE_FUNCTION, &tem);
5576 rtx_insn *seq = get_insns ();
5577 end_sequence ();
5578 emit_insn_before (seq, parm_birth_insn);
5579
5580 emit_stack_restore (SAVE_FUNCTION, tem);
5581 }
5582
5583 /* ??? This should no longer be necessary since stupid is no longer with
5584 us, but there are some parts of the compiler (e.g. reload_combine, and
5585 sh mach_dep_reorg) that still try to compute their own lifetime info
5586 instead of using the general framework. */
5587 use_return_register ();
5588}
5589
5590rtx
5591get_arg_pointer_save_area (void)
5592{
5593 rtx ret = arg_pointer_save_area;
5594
5595 if (! ret)
5596 {
5597 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5598 arg_pointer_save_area = ret;
5599 }
5600
5601 if (! crtl->arg_pointer_save_area_init)
5602 {
5603 /* Save the arg pointer at the beginning of the function. The
5604 generated stack slot may not be a valid memory address, so we
5605 have to check it and fix it if necessary. */
5606 start_sequence ();
5607 emit_move_insn (validize_mem (copy_rtx (ret)),
5608 crtl->args.internal_arg_pointer);
5609 rtx_insn *seq = get_insns ();
5610 end_sequence ();
5611
5612 push_topmost_sequence ();
5613 emit_insn_after (seq, entry_of_function ());
5614 pop_topmost_sequence ();
5615
5616 crtl->arg_pointer_save_area_init = true;
5617 }
5618
5619 return ret;
5620}
5621
5622
5623/* If debugging dumps are requested, dump information about how the
5624 target handled -fstack-check=clash for the prologue.
5625
5626 PROBES describes what if any probes were emitted.
5627
5628 RESIDUALS indicates if the prologue had any residual allocation
5629 (i.e. total allocation was not a multiple of PROBE_INTERVAL). */
5630
5631void
5632dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5633{
5634 if (!dump_file)
5635 return;
5636
5637 switch (probes)
5638 {
5639 case NO_PROBE_NO_FRAME:
5640 fprintf (dump_file,
5641 "Stack clash no probe no stack adjustment in prologue.\n");
5642 break;
5643 case NO_PROBE_SMALL_FRAME:
5644 fprintf (dump_file,
5645 "Stack clash no probe small stack adjustment in prologue.\n");
5646 break;
5647 case PROBE_INLINE:
5648 fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5649 break;
5650 case PROBE_LOOP:
5651 fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5652 break;
5653 }
5654
5655 if (residuals)
5656 fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5657 else
5658 fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5659
5660 if (frame_pointer_needed)
5661 fprintf (dump_file, "Stack clash frame pointer needed.\n");
5662 else
5663 fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5664
5665 if (TREE_THIS_VOLATILE (cfun->decl))
5666 fprintf (dump_file,
5667 "Stack clash noreturn prologue, assuming no implicit"
5668 " probes in caller.\n");
5669 else
5670 fprintf (dump_file,
5671 "Stack clash not noreturn prologue.\n");
5672}
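/* For instance (illustrative only, with dumping enabled), a prologue that
   emits inline probes and has a residual allocation, needs no frame
   pointer, and is not noreturn would produce dump lines such as:

       Stack clash inline probes in prologue.
       Stack clash residual allocation in prologue.
       Stack clash no frame pointer needed.
       Stack clash not noreturn prologue.  */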
5673
5674/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5675 for the first time. */
5676
5677static void
5678record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5679{
5680 rtx_insn *tmp;
5681 hash_table<insn_cache_hasher> *hash = *hashp;
5682
5683 if (hash == NULL)
5684 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5685
5686 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5687 {
5688 rtx *slot = hash->find_slot (tmp, INSERT);
5689 gcc_assert (*slot == NULL);
5690 *slot = tmp;
5691 }
5692}
5693
5694 /* INSN has been duplicated, or replaced by COPY, perhaps as a result of
5695 duplicating a basic block, splitting, or peepholes. If INSN is a prologue
5696 or epilogue insn, then record COPY as well. */
5697
5698void
5699maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5700{
5701 hash_table<insn_cache_hasher> *hash;
5702 rtx *slot;
5703
5704 hash = epilogue_insn_hash;
5705 if (!hash || !hash->find (insn))
5706 {
5707 hash = prologue_insn_hash;
5708 if (!hash || !hash->find (insn))
5709 return;
5710 }
5711
5712 slot = hash->find_slot (copy, INSERT);
5713 gcc_assert (*slot == NULL);
5714 *slot = copy;
5715}
5716
5717 /* Determine whether INSN, or any insn it contains, is recorded in HASH.
5718 Because we can be running after reorg, SEQUENCE rtl is possible. */
5719
5720static bool
5721contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5722{
5723 if (hash == NULL)
5724 return false;
5725
5726 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5727 {
5728 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5729 int i;
5730 for (i = seq->len () - 1; i >= 0; i--)
5731 if (hash->find (seq->element (i)))
5732 return true;
5733 return false;
5734 }
5735
5736 return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5737}
5738
5739bool
5740prologue_contains (const rtx_insn *insn)
5741{
5742 return contains (insn, prologue_insn_hash);
5743}
5744
5745bool
5746epilogue_contains (const rtx_insn *insn)
5747{
5748 return contains (insn, epilogue_insn_hash);
5749}
5750
5751bool
5752prologue_epilogue_contains (const rtx_insn *insn)
5753{
5754 if (contains (insn, prologue_insn_hash))
5755 return true;
5756 if (contains (insn, epilogue_insn_hash))
5757 return true;
5758 return false;
5759}
5760
5761void
5762record_prologue_seq (rtx_insn *seq)
5763{
5764 record_insns (seq, NULL, &prologue_insn_hash);
5765}
5766
5767void
5768record_epilogue_seq (rtx_insn *seq)
5769{
5770 record_insns (seq, NULL, &epilogue_insn_hash);
5771}
5772
5773/* Set JUMP_LABEL for a return insn. */
5774
5775void
5776set_return_jump_label (rtx_insn *returnjump)
5777{
5778 rtx pat = PATTERN (returnjump);
5779 if (GET_CODE (pat) == PARALLEL)
5780 pat = XVECEXP (pat, 0, 0);
5781 if (ANY_RETURN_P (pat))
5782 JUMP_LABEL (returnjump) = pat;
5783 else
5784 JUMP_LABEL (returnjump) = ret_rtx;
5785}
5786
5787/* Return a sequence to be used as the split prologue for the current
5788 function, or NULL. */
5789
5790static rtx_insn *
5791make_split_prologue_seq (void)
5792{
5793 if (!flag_split_stack
5794 || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5795 return NULL;
5796
5797 start_sequence ();
5798 emit_insn (targetm.gen_split_stack_prologue ());
5799 rtx_insn *seq = get_insns ();
5800 end_sequence ();
5801
5802 record_insns (seq, NULL, &prologue_insn_hash);
5803 set_insn_locations (seq, prologue_location);
5804
5805 return seq;
5806}
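/* Illustration (hypothetical user code): the sequence above is emitted for
   -fsplit-stack unless the function opts out with the attribute checked
   above, e.g.

       __attribute__ ((no_split_stack))
       void
       signal_handler (int sig)
       {
	 handle (sig);
       }

   SIGNAL_HANDLER and HANDLE are made-up names; only the attribute spelling
   comes from this file.  */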
5807
5808/* Return a sequence to be used as the prologue for the current function,
5809 or NULL. */
5810
5811static rtx_insn *
5812make_prologue_seq (void)
5813{
5814 if (!targetm.have_prologue ())
5815 return NULL;
5816
5817 start_sequence ();
5818 rtx_insn *seq = targetm.gen_prologue ();
5819 emit_insn (seq);
5820
5821 /* Insert an explicit USE for the frame pointer
5822 if the profiling is on and the frame pointer is required. */
5823 if (crtl->profile && frame_pointer_needed)
5824 emit_use (hard_frame_pointer_rtx);
5825
5826 /* Retain a map of the prologue insns. */
5827 record_insns (seq, NULL, &prologue_insn_hash);
5828 emit_note (NOTE_INSN_PROLOGUE_END);
5829
5830 /* Ensure that instructions are not moved into the prologue when
5831 profiling is on. The call to the profiling routine can be
5832 emitted within the live range of a call-clobbered register. */
5833 if (!targetm.profile_before_prologue () && crtl->profile)
5834 emit_insn (gen_blockage ());
5835
5836 seq = get_insns ();
5837 end_sequence ();
5838 set_insn_locations (seq, prologue_location);
5839
5840 return seq;
5841}
5842
5843/* Emit a sequence of insns to zero the call-used registers before RET
5844 according to ZERO_REGS_TYPE. */
5845
5846static void
5847gen_call_used_regs_seq (rtx_insn *ret, unsigned int zero_regs_type)
5848{
5849 bool only_gpr = true;
5850 bool only_used = true;
5851 bool only_arg = true;
5852
5853 /* No need to zero call-used-regs in main (). */
5854 if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
5855 return;
5856
5857 /* No need to zero call-used-regs if __builtin_eh_return is called
5858 since it isn't a normal function return. */
5859 if (crtl->calls_eh_return)
5860 return;
5861
5862 /* If only_gpr is true, only zero call-used registers that are
5863 general-purpose registers; if only_used is true, only zero
5864 call-used registers that are used in the current function;
5865 if only_arg is true, only zero call-used registers that pass
5866 parameters defined by the platform's calling convention. */
5867
5868 using namespace zero_regs_flags;
5869
5870 only_gpr = zero_regs_type & ONLY_GPR;
5871 only_used = zero_regs_type & ONLY_USED;
5872 only_arg = zero_regs_type & ONLY_ARG;
5873
5874 if ((zero_regs_type & LEAFY_MODE) && leaf_function_p ())
5875 only_used = true;
5876
5877 /* For each of the hard registers, we should zero it if:
5878 1. it is a call-used register;
5879 and 2. it is not a fixed register;
5880 and 3. it is not live at the return of the routine;
5881 and 4. it is a general register if only_gpr is true;
5882 and 5. it is used in the routine if only_used is true;
5883 and 6. it is a register that passes parameter if only_arg is true. */
5884
5885 /* First, prepare the data flow information. */
5886 basic_block bb = BLOCK_FOR_INSN (ret);
5887 auto_bitmap live_out;
5888 bitmap_copy (live_out, df_get_live_out (bb));
5889 df_simulate_initialize_backwards (bb, live_out);
5890 df_simulate_one_insn_backwards (bb, ret, live_out);
5891
5892 HARD_REG_SET selected_hardregs;
5893 HARD_REG_SET all_call_used_regs;
5894 CLEAR_HARD_REG_SET (selected_hardregs);
5895 CLEAR_HARD_REG_SET (all_call_used_regs);
5896 for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5897 {
5898 if (!crtl->abi->clobbers_full_reg_p (regno))
5899 continue;
5900 if (fixed_regs[regno])
5901 continue;
5902 if (REGNO_REG_SET_P (live_out, regno))
5903 continue;
5904#ifdef LEAF_REG_REMAP
5905 if (crtl->uses_only_leaf_regs && LEAF_REG_REMAP (regno) < 0)
5906 continue;
5907#endif
5908 /* This is a call used register that is dead at return. */
5909 SET_HARD_REG_BIT (all_call_used_regs, regno);
5910
5911 if (only_gpr
5912 && !TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], regno))
5913 continue;
5914 if (only_used && !df_regs_ever_live_p (regno))
5915 continue;
5916 if (only_arg && !FUNCTION_ARG_REGNO_P (regno))
5917 continue;
5918
5919 /* Now this is a register that we might want to zero. */
5920 SET_HARD_REG_BIT (selected_hardregs, regno);
5921 }
5922
5923 if (hard_reg_set_empty_p (selected_hardregs))
5924 return;
5925
5926 /* Now that we have a hard register set that needs to be zeroed, pass it to
5927 target to generate zeroing sequence. */
5928 HARD_REG_SET zeroed_hardregs;
5929 start_sequence ();
5930 zeroed_hardregs = targetm.calls.zero_call_used_regs (selected_hardregs);
5931
5932 /* For most targets, the returned set of registers is a subset of
5933 selected_hardregs. However, on some targets (for example MIPS),
5934 clearing a register that is in selected_hardregs requires clearing
5935 other call-used registers that are not in selected_hardregs; in that
5936 situation the returned set must still be a subset of all call-used
5937 registers. */
5938 gcc_assert (hard_reg_set_subset_p (zeroed_hardregs, all_call_used_regs));
5939
5940 rtx_insn *seq = get_insns ();
5941 end_sequence ();
5942 if (seq)
5943 {
5944 /* Emit the memory blockage and register clobber asm volatile before
5945 the whole sequence. */
5946 start_sequence ();
5947 expand_asm_reg_clobber_mem_blockage (zeroed_hardregs);
5948 rtx_insn *seq_barrier = get_insns ();
5949 end_sequence ();
5950
5951 emit_insn_before (seq_barrier, ret);
5952 emit_insn_before (seq, ret);
5953
5954 /* Update the data flow information. */
5955 crtl->must_be_zero_on_return |= zeroed_hardregs;
5956 df_update_exit_block_uses ();
5957 }
5958}
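/* As an illustration (hypothetical user code), the zeroing sequence above
   implements requests such as

       __attribute__ ((zero_call_used_regs ("used-gpr")))
       int
       callee (int x)
       {
	 return x * 3;
       }

   or the equivalent -fzero-call-used-regs=used-gpr option: just before
   CALLEE returns, the call-used general-purpose registers it actually wrote
   are cleared, limiting what leaks back to the caller.  */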
5959
5960
5961/* Return a sequence to be used as the epilogue for the current function,
5962 or NULL. */
5963
5964static rtx_insn *
5965make_epilogue_seq (void)
5966{
5967 if (!targetm.have_epilogue ())
5968 return NULL;
5969
5970 start_sequence ();
5971 emit_note (NOTE_INSN_EPILOGUE_BEG);
5972 rtx_insn *seq = targetm.gen_epilogue ();
5973 if (seq)
5974 emit_jump_insn (seq);
5975
5976 /* Retain a map of the epilogue insns. */
5977 record_insns (seq, NULL, &epilogue_insn_hash);
5978 set_insn_locations (seq, epilogue_location);
5979
5980 seq = get_insns ();
5981 rtx_insn *returnjump = get_last_insn ();
5982 end_sequence ();
5983
5984 if (JUMP_P (returnjump))
5985 set_return_jump_label (returnjump);
5986
5987 return seq;
5988}
5989
5990
5991/* Generate the prologue and epilogue RTL if the machine supports it. Thread
5992 this into place with notes indicating where the prologue ends and where
5993 the epilogue begins. Update the basic block information when possible.
5994
5995 Notes on epilogue placement:
5996 There are several kinds of edges to the exit block:
5997 * a single fallthru edge from LAST_BB
5998 * possibly, edges from blocks containing sibcalls
5999 * possibly, fake edges from infinite loops
6000
6001 The epilogue is always emitted on the fallthru edge from the last basic
6002 block in the function, LAST_BB, into the exit block.
6003
6004 If LAST_BB is empty except for a label, it is the target of every
6005 other basic block in the function that ends in a return. If a
6006 target has a return or simple_return pattern (possibly with
6007 conditional variants), these basic blocks can be changed so that a
6008 return insn is emitted into them, and their target is adjusted to
6009 the real exit block.
6010
6011 Notes on shrink wrapping: We implement a fairly conservative
6012 version of shrink-wrapping rather than the textbook one. We only
6013 generate a single prologue and a single epilogue. This is
6014 sufficient to catch a number of interesting cases involving early
6015 exits.
6016
6017 First, we identify the blocks that require the prologue to occur before
6018 them. These are the ones that modify a call-saved register, or reference
6019 any of the stack or frame pointer registers. To simplify things, we then
6020 mark everything reachable from these blocks as also requiring a prologue.
6021 This takes care of loops automatically, and avoids the need to examine
6022 whether MEMs reference the frame, since it is sufficient to check for
6023 occurrences of the stack or frame pointer.
6024
6025 We then compute the set of blocks for which the need for a prologue
6026 is anticipatable (borrowing terminology from the shrink-wrapping
6027 description in Muchnick's book). These are the blocks which either
6028 require a prologue themselves, or those that have only successors
6029 where the prologue is anticipatable. The prologue needs to be
6030 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
6031 is not. For the moment, we ensure that only one such edge exists.
6032
6033 The epilogue is placed as described above, but we make a
6034 distinction between inserting return and simple_return patterns
6035 when modifying other blocks that end in a return. Blocks that end
6036 in a sibcall omit the sibcall_epilogue if the block is not in
6037 ANTIC. */
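/* A small example of the effect (hypothetical source, not part of GCC):

       int
       f (int *p)
       {
	 if (!p)
	   return -1;		// early exit needs no stack frame
	 return do_work (p);
       }

   With the conservative shrink-wrapping described above, the prologue is
   inserted on the edge leading to the DO_WORK path, so the early-return
   path executes no register saves or frame setup.  */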
6038
6039void
6040thread_prologue_and_epilogue_insns (void)
6041{
6042 df_analyze ();
6043
6044 /* Can't deal with multiple successors of the entry block at the
6045 moment. Function should always have at least one entry
6046 point. */
6047 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6048
6049 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6050 edge orig_entry_edge = entry_edge;
6051
6052 rtx_insn *split_prologue_seq = make_split_prologue_seq ();
6053 rtx_insn *prologue_seq = make_prologue_seq ();
6054 rtx_insn *epilogue_seq = make_epilogue_seq ();
6055
6056 /* Try to perform a kind of shrink-wrapping, making sure the
6057 prologue/epilogue is emitted only around those parts of the
6058 function that require it. */
6059 try_shrink_wrapping (&entry_edge, prologue_seq);
6060
6061 /* If the target can handle splitting the prologue/epilogue into separate
6062 components, try to shrink-wrap these components separately. */
6063 try_shrink_wrapping_separate (entry_edge->dest);
6064
6065 /* If that did anything for any component we now need to generate the
6066 "main" prologue again. Because some targets require some of these
6067 to be called in a specific order (i386 requires the split prologue
6068 to be first, for example), we create all three sequences again here.
6069 If this does not work for some target, that target should not enable
6070 separate shrink-wrapping. */
6071 if (crtl->shrink_wrapped_separate)
6072 {
6073 split_prologue_seq = make_split_prologue_seq ();
6074 prologue_seq = make_prologue_seq ();
6075 epilogue_seq = make_epilogue_seq ();
6076 }
6077
6078 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6079
6080 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6081 this marker for the splits of EH_RETURN patterns, and nothing else
6082 uses the flag in the meantime. */
6083 epilogue_completed = 1;
6084
6085 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6086 some targets, these get split to a special version of the epilogue
6087 code. In order to be able to properly annotate these with unwind
6088 info, try to split them now. If we get a valid split, drop an
6089 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6090 edge e;
6091 edge_iterator ei;
6092 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6093 {
6094 rtx_insn *prev, *last, *trial;
6095
6096 if (e->flags & EDGE_FALLTHRU)
6097 continue;
6098 last = BB_END (e->src);
6099 if (!eh_returnjump_p (last))
6100 continue;
6101
6102 prev = PREV_INSN (last);
6103 trial = try_split (PATTERN (last), last, 1);
6104 if (trial == last)
6105 continue;
6106
6107 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6108 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6109 }
6110
6111 edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6112
6113 if (exit_fallthru_edge)
6114 {
6115 if (epilogue_seq)
6116 {
6117 insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
6118 commit_edge_insertions ();
6119
6120 /* The epilogue insns we inserted may cause the exit edge to no longer
6121 be fallthru. */
6122 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6123 {
6124 if (((e->flags & EDGE_FALLTHRU) != 0)
6125 && returnjump_p (BB_END (e->src)))
6126 e->flags &= ~EDGE_FALLTHRU;
6127 }
6128
6129 find_sub_basic_blocks (BLOCK_FOR_INSN (epilogue_seq));
6130 }
6131 else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
6132 {
6133 /* We have a fall-through edge to the exit block, the source is not
6134 at the end of the function, and there will be an assembler epilogue
6135 at the end of the function.
6136 We can't use force_nonfallthru here, because that would try to
6137 use return. Inserting a jump 'by hand' is extremely messy, so
6138 we take advantage of cfg_layout_finalize using
6139 fixup_fallthru_exit_predecessor. */
6140 cfg_layout_initialize (0);
6141 basic_block cur_bb;
6142 FOR_EACH_BB_FN (cur_bb, cfun)
6143 if (cur_bb->index >= NUM_FIXED_BLOCKS
6144 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6145 cur_bb->aux = cur_bb->next_bb;
6146 cfg_layout_finalize ();
6147 }
6148 }
6149
6150 /* Insert the prologue. */
6151
6152 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6153
6154 if (split_prologue_seq || prologue_seq)
6155 {
6156 rtx_insn *split_prologue_insn = split_prologue_seq;
6157 if (split_prologue_seq)
6158 {
6159 while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6160 split_prologue_insn = NEXT_INSN (split_prologue_insn);
6161 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6162 }
6163
6164 rtx_insn *prologue_insn = prologue_seq;
6165 if (prologue_seq)
6166 {
6167 while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6168 prologue_insn = NEXT_INSN (prologue_insn);
6169 insert_insn_on_edge (prologue_seq, entry_edge);
6170 }
6171
6172 commit_edge_insertions ();
6173
6174 /* Look for basic blocks within the prologue insns. */
6175 if (split_prologue_insn
6176 && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6177 split_prologue_insn = NULL;
6178 if (prologue_insn
6179 && BLOCK_FOR_INSN (prologue_insn) == NULL)
6180 prologue_insn = NULL;
6181 if (split_prologue_insn || prologue_insn)
6182 {
6183 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6184 bitmap_clear (blocks);
6185 if (split_prologue_insn)
6186 bitmap_set_bit (blocks,
6187 BLOCK_FOR_INSN (split_prologue_insn)->index);
6188 if (prologue_insn)
6189 bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6190 find_many_sub_basic_blocks (blocks);
6191 }
6192 }
6193
6194 default_rtl_profile ();
6195
6196 /* Emit sibling epilogues before any sibling call sites. */
6197 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6198 (e = ei_safe_edge (ei));
6199 ei_next (&ei))
6200 {
6201 /* Skip those already handled, the ones that run without prologue. */
6202 if (e->flags & EDGE_IGNORE)
6203 {
6204 e->flags &= ~EDGE_IGNORE;
6205 continue;
6206 }
6207
6208 rtx_insn *insn = BB_END (e->src);
6209
6210 if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6211 continue;
6212
6213 rtx_insn *ep_seq;
6214 if (targetm.emit_epilogue_for_sibcall)
6215 {
6216 start_sequence ();
6217 targetm.emit_epilogue_for_sibcall (as_a<rtx_call_insn *> (insn));
6218 ep_seq = get_insns ();
6219 end_sequence ();
6220 }
6221 else
6222 ep_seq = targetm.gen_sibcall_epilogue ();
6223 if (ep_seq)
6224 {
6225 start_sequence ();
6226 emit_note (NOTE_INSN_EPILOGUE_BEG);
6227 emit_insn (ep_seq);
6228 rtx_insn *seq = get_insns ();
6229 end_sequence ();
6230
6231 /* Retain a map of the epilogue insns. Used in life analysis to
6232 avoid getting rid of sibcall epilogue insns. Do this before we
6233 actually emit the sequence. */
6234 record_insns (seq, NULL, &epilogue_insn_hash);
6235 set_insn_locations (seq, epilogue_location);
6236
6237 emit_insn_before (seq, insn);
6238
6239 find_sub_basic_blocks (BLOCK_FOR_INSN (insn));
6240 }
6241 }
6242
6243 if (epilogue_seq)
6244 {
6245 rtx_insn *insn, *next;
6246
6247 /* Move any line notes that appear after the epilogue back before it.
6248 There is no need, however, to be too strict about the existence
6249 of such a note. In particular, move
6250 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6251 info generation. */
6252 for (insn = epilogue_seq; insn; insn = next)
6253 {
6254 next = NEXT_INSN (insn);
6255 if (NOTE_P (insn)
6256 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6257 reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6258 }
6259 }
6260
6261 /* Threading the prologue and epilogue changes the artificial refs in the
6262 entry and exit blocks, and may invalidate DF info for tail calls. */
6263 if (optimize
6264 || flag_optimize_sibling_calls
6265 || flag_ipa_icf_functions
6266 || in_lto_p)
6267 df_update_entry_exit_and_calls ();
6268 else
6269 {
6270 df_update_entry_block_defs ();
6271 df_update_exit_block_uses ();
6272 }
6273}
6274
6275/* Reposition the prologue-end and epilogue-begin notes after
6276 instruction scheduling. */
6277
6278void
6279reposition_prologue_and_epilogue_notes (void)
6280{
6281 if (!targetm.have_prologue ()
6282 && !targetm.have_epilogue ()
6283 && !targetm.have_sibcall_epilogue ()
6284 && !targetm.emit_epilogue_for_sibcall)
6285 return;
6286
6287 /* Since the hash table is created on demand, the fact that it is
6288 non-null is a signal that it is non-empty. */
6289 if (prologue_insn_hash != NULL)
6290 {
6291 size_t len = prologue_insn_hash->elements ();
6292 rtx_insn *insn, *last = NULL, *note = NULL;
6293
6294 /* Scan from the beginning until we reach the last prologue insn. */
6295 /* ??? While we do have the CFG intact, there are two problems:
6296 (1) The prologue can contain loops (typically probing the stack),
6297 which means that the end of the prologue isn't in the first bb.
6298 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6299 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6300 {
6301 if (NOTE_P (insn))
6302 {
6303 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6304 note = insn;
6305 }
6306 else if (contains (insn, prologue_insn_hash))
6307 {
6308 last = insn;
6309 if (--len == 0)
6310 break;
6311 }
6312 }
6313
6314 if (last)
6315 {
6316 if (note == NULL)
6317 {
6318 /* Scan forward looking for the PROLOGUE_END note. It should
6319 be right at the beginning of the block, possibly with other
6320 insn notes that got moved there. */
6321 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6322 {
6323 if (NOTE_P (note)
6324 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6325 break;
6326 }
6327 }
6328
6329 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6330 if (LABEL_P (last))
6331 last = NEXT_INSN (last);
6332 reorder_insns (note, note, last);
6333 }
6334 }
6335
6336 if (epilogue_insn_hash != NULL)
6337 {
6338 edge_iterator ei;
6339 edge e;
6340
6341 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6342 {
6343 rtx_insn *insn, *first = NULL, *note = NULL;
6344 basic_block bb = e->src;
6345
6346 /* Scan from the beginning until we reach the first epilogue insn. */
6347 FOR_BB_INSNS (bb, insn)
6348 {
6349 if (NOTE_P (insn))
6350 {
6351 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6352 {
6353 note = insn;
6354 if (first != NULL)
6355 break;
6356 }
6357 }
6358 else if (first == NULL && contains (insn, epilogue_insn_hash))
6359 {
6360 first = insn;
6361 if (note != NULL)
6362 break;
6363 }
6364 }
6365
6366 if (note)
6367 {
6368 /* If the function has a single basic block, and no real
6369 epilogue insns (e.g. sibcall with no cleanup), the
6370 epilogue note can get scheduled before the prologue
6371 note. If we have frame related prologue insns, having
6372 them scanned during the epilogue will result in a crash.
6373 In this case re-order the epilogue note to just before
6374 the last insn in the block. */
6375 if (first == NULL)
6376 first = BB_END (bb);
6377
6378 if (PREV_INSN (first) != note)
6379 reorder_insns (note, note, PREV_INSN (first));
6380 }
6381 }
6382 }
6383}
6384
6385/* Returns the name of function declared by FNDECL. */
6386const char *
6387fndecl_name (tree fndecl)
6388{
6389 if (fndecl == NULL)
6390 return "(nofn)";
6391 return lang_hooks.decl_printable_name (fndecl, 1);
6392}
6393
6394/* Returns the name of function FN. */
6395const char *
6396function_name (const function *fn)
6397{
6398 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6399 return fndecl_name (fndecl);
6400}
6401
6402/* Returns the name of the current function. */
6403const char *
6404current_function_name (void)
6405{
6406 return function_name (cfun);
6407}
6408
6409
6410static void
6411rest_of_handle_check_leaf_regs (void)
6412{
6413#ifdef LEAF_REGISTERS
6414 crtl->uses_only_leaf_regs
6415 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6416#endif
6417}
6418
6419/* Insert a TYPE into the used types hash table of CFUN. */
6420
6421static void
6422used_types_insert_helper (tree type, struct function *func)
6423{
6424 if (type != NULL && func != NULL)
6425 {
6426 if (func->used_types_hash == NULL)
6427 func->used_types_hash = hash_set<tree>::create_ggc (37);
6428
6429 func->used_types_hash->add (type);
6430 }
6431}
6432
6433/* Given a type, insert it into the used hash table in cfun. */
6434void
6435used_types_insert (tree t)
6436{
6437 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6438 if (TYPE_NAME (t))
6439 break;
6440 else
6441 t = TREE_TYPE (t);
6442 if (TREE_CODE (t) == ERROR_MARK)
6443 return;
6444 if (TYPE_NAME (t) == NULL_TREE
6445 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6446 t = TYPE_MAIN_VARIANT (t);
6447 if (debug_info_level > DINFO_LEVEL_NONE)
6448 {
6449 if (cfun)
6450 used_types_insert_helper (t, cfun);
6451 else
6452 {
6453 /* So this might be a type referenced by a global variable.
6454 Record that type so that we can later decide to emit its
6455 debug information. */
6456 vec_safe_push (types_used_by_cur_var_decl, t);
6457 }
6458 }
6459}
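/* A sketch of the effect (hypothetical declarations): for

       struct S { int i; };
       struct S *sp;

   used_types_insert may be called with the unnamed pointer type
   "struct S *"; the loop above strips the pointer and records "struct S"
   (or its main variant) either in CFUN's used-type hash or, for a global
   variable, in types_used_by_cur_var_decl.  */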
6460
6461/* Helper to Hash a struct types_used_by_vars_entry. */
6462
6463static hashval_t
6464hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6465{
6466 gcc_assert (entry && entry->var_decl && entry->type);
6467
6468 return iterative_hash_object (entry->type,
6469 iterative_hash_object (entry->var_decl, 0));
6470}
6471
6472/* Hash function of the types_used_by_vars_entry hash table. */
6473
6474hashval_t
6475used_type_hasher::hash (types_used_by_vars_entry *entry)
6476{
6477 return hash_types_used_by_vars_entry (entry);
6478}
6479
6480 /* Equality function of the types_used_by_vars_entry hash table. */
6481
6482bool
6483used_type_hasher::equal (types_used_by_vars_entry *e1,
6484 types_used_by_vars_entry *e2)
6485{
6486 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6487}
6488
6489/* Inserts an entry into the types_used_by_vars_hash hash table. */
6490
6491void
6492types_used_by_var_decl_insert (tree type, tree var_decl)
6493{
6494 if (type != NULL && var_decl != NULL)
6495 {
6496 types_used_by_vars_entry **slot;
6497 struct types_used_by_vars_entry e;
6498 e.var_decl = var_decl;
6499 e.type = type;
6500 if (types_used_by_vars_hash == NULL)
6501 types_used_by_vars_hash
6502 = hash_table<used_type_hasher>::create_ggc (37);
6503
6504 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6505 if (*slot == NULL)
6506 {
6507 struct types_used_by_vars_entry *entry;
6508 entry = ggc_alloc<types_used_by_vars_entry> ();
6509 entry->type = type;
6510 entry->var_decl = var_decl;
6511 *slot = entry;
6512 }
6513 }
6514}
6515
6516namespace {
6517
6518const pass_data pass_data_leaf_regs =
6519{
6520 RTL_PASS, /* type */
6521 "*leaf_regs", /* name */
6522 OPTGROUP_NONE, /* optinfo_flags */
6523 TV_NONE, /* tv_id */
6524 0, /* properties_required */
6525 0, /* properties_provided */
6526 0, /* properties_destroyed */
6527 0, /* todo_flags_start */
6528 0, /* todo_flags_finish */
6529};
6530
6531class pass_leaf_regs : public rtl_opt_pass
6532{
6533public:
6534 pass_leaf_regs (gcc::context *ctxt)
6535 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6536 {}
6537
6538 /* opt_pass methods: */
6539 unsigned int execute (function *) final override
6540 {
6541 rest_of_handle_check_leaf_regs ();
6542 return 0;
6543 }
6544
6545}; // class pass_leaf_regs
6546
6547} // anon namespace
6548
6549rtl_opt_pass *
6550make_pass_leaf_regs (gcc::context *ctxt)
6551{
6552 return new pass_leaf_regs (ctxt);
6553}
6554
6555static void
6556rest_of_handle_thread_prologue_and_epilogue (function *fun)
6557{
6558 /* prepare_shrink_wrap is sensitive to the block structure of the control
6559 flow graph, so clean it up first. */
6560 if (optimize)
6561 cleanup_cfg (0);
6562
6563 /* On some machines, the prologue and epilogue code, or parts thereof,
6564 can be represented as RTL. Doing so lets us schedule insns between
6565 it and the rest of the code and also allows delayed branch
6566 scheduling to operate in the epilogue. */
6567 thread_prologue_and_epilogue_insns ();
6568
6569 /* Some non-cold blocks may now be only reachable from cold blocks.
6570 Fix that up. */
6571 fixup_partitions ();
6572
6573 /* After prologue and epilogue generation, the judgment on whether
6574 a memory access into the stack frame may trap or not can change,
6575 since we now have more exact stack information. So try to
6576 remove any EH edges here; see PR90259. */
6577 if (fun->can_throw_non_call_exceptions)
6578 purge_all_dead_edges ();
6579
6580 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6581 see PR57320. */
6582 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6583
6584 /* The stack usage info is finalized during prologue expansion. */
6585 if (flag_stack_usage_info || flag_callgraph_info)
6586 output_stack_usage ();
6587}
6588
6589/* Record a final call to CALLEE at LOCATION. */
6590
6591void
6592record_final_call (tree callee, location_t location)
6593{
6594 struct callinfo_callee datum = { location, callee };
6595 vec_safe_push (cfun->su->callees, datum);
6596}
6597
6598/* Record a dynamic allocation made for DECL_OR_EXP. */
6599
6600void
6601record_dynamic_alloc (tree decl_or_exp)
6602{
6603 struct callinfo_dalloc datum;
6604
6605 if (DECL_P (decl_or_exp))
6606 {
6607 datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
6608 const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
6609 const char *dot = strrchr (name, '.');
6610 if (dot)
6611 name = dot + 1;
6612 datum.name = ggc_strdup (name);
6613 }
6614 else
6615 {
6616 datum.location = EXPR_LOCATION (decl_or_exp);
6617 datum.name = NULL;
6618 }
6619
6620 vec_safe_push (cfun->su->dallocs, datum);
6621}
6622
6623namespace {
6624
6625const pass_data pass_data_thread_prologue_and_epilogue =
6626{
6627 RTL_PASS, /* type */
6628 "pro_and_epilogue", /* name */
6629 OPTGROUP_NONE, /* optinfo_flags */
6630 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6631 0, /* properties_required */
6632 0, /* properties_provided */
6633 0, /* properties_destroyed */
6634 0, /* todo_flags_start */
6635 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6636};
6637
6638class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6639{
6640public:
6641 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6642 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6643 {}
6644
6645 /* opt_pass methods: */
6646 bool gate (function *) final override
6647 {
6648 return !targetm.use_late_prologue_epilogue ();
6649 }
6650
6651 unsigned int execute (function * fun) final override
6652 {
6653 rest_of_handle_thread_prologue_and_epilogue (fun);
6654 return 0;
6655 }
6656
6657}; // class pass_thread_prologue_and_epilogue
6658
6659const pass_data pass_data_late_thread_prologue_and_epilogue =
6660{
6661 RTL_PASS, /* type */
6662 "late_pro_and_epilogue", /* name */
6663 OPTGROUP_NONE, /* optinfo_flags */
6664 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6665 0, /* properties_required */
6666 0, /* properties_provided */
6667 0, /* properties_destroyed */
6668 0, /* todo_flags_start */
6669 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6670};
6671
6672class pass_late_thread_prologue_and_epilogue : public rtl_opt_pass
6673{
6674public:
6675 pass_late_thread_prologue_and_epilogue (gcc::context *ctxt)
6676 : rtl_opt_pass (pass_data_late_thread_prologue_and_epilogue, ctxt)
6677 {}
6678
6679 /* opt_pass methods: */
6680 bool gate (function *) final override
6681 {
6682 return targetm.use_late_prologue_epilogue ();
6683 }
6684
6685 unsigned int execute (function *fn) final override
6686 {
6687 /* It's not currently possible to have both delay slots and
6688 late prologue/epilogue, since the latter has to run before
6689 the former, and the former won't honor whatever restrictions
6690 the latter is trying to enforce. */
6691 gcc_assert (!DELAY_SLOTS);
6692 rest_of_handle_thread_prologue_and_epilogue (fn);
6693 return 0;
6694 }
6695}; // class pass_late_thread_prologue_and_epilogue
6696
6697} // anon namespace
6698
6699rtl_opt_pass *
6700make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6701{
6702 return new pass_thread_prologue_and_epilogue (ctxt);
6703}
6704
6705rtl_opt_pass *
6706make_pass_late_thread_prologue_and_epilogue (gcc::context *ctxt)
6707{
6708 return new pass_late_thread_prologue_and_epilogue (ctxt);
6709}
6710
6711namespace {
6712
6713const pass_data pass_data_zero_call_used_regs =
6714{
6715 RTL_PASS, /* type */
6716 "zero_call_used_regs", /* name */
6717 OPTGROUP_NONE, /* optinfo_flags */
6718 TV_NONE, /* tv_id */
6719 0, /* properties_required */
6720 0, /* properties_provided */
6721 0, /* properties_destroyed */
6722 0, /* todo_flags_start */
6723 0, /* todo_flags_finish */
6724};
6725
6726class pass_zero_call_used_regs: public rtl_opt_pass
6727{
6728public:
6729 pass_zero_call_used_regs (gcc::context *ctxt)
6730 : rtl_opt_pass (pass_data_zero_call_used_regs, ctxt)
6731 {}
6732
6733 /* opt_pass methods: */
6734 unsigned int execute (function *) final override;
6735
6736}; // class pass_zero_call_used_regs
6737
6738unsigned int
6739pass_zero_call_used_regs::execute (function *fun)
6740{
6741 using namespace zero_regs_flags;
6742 unsigned int zero_regs_type = UNSET;
6743
6744 tree attr_zero_regs = lookup_attribute ("zero_call_used_regs",
6745 DECL_ATTRIBUTES (fun->decl));
6746
6747 /* Get the type of zero_call_used_regs from function attribute.
6748 We have filtered out invalid attribute values already at this point. */
6749 if (attr_zero_regs)
6750 {
6751 /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
6752 is the attribute argument's value. */
6753 attr_zero_regs = TREE_VALUE (attr_zero_regs);
6754 gcc_assert (TREE_CODE (attr_zero_regs) == TREE_LIST);
6755 attr_zero_regs = TREE_VALUE (attr_zero_regs);
6756 gcc_assert (TREE_CODE (attr_zero_regs) == STRING_CST);
6757
6758 for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL; ++i)
6759 if (strcmp (TREE_STRING_POINTER (attr_zero_regs),
6760 zero_call_used_regs_opts[i].name) == 0)
6761 {
6762 zero_regs_type = zero_call_used_regs_opts[i].flag;
6763 break;
6764 }
6765 }
6766
6767 if (!zero_regs_type)
6768 zero_regs_type = flag_zero_call_used_regs;
6769
6770 /* No need to zero call-used-regs when no user request is present. */
6771 if (!(zero_regs_type & ENABLED))
6772 return 0;
6773
6774 edge_iterator ei;
6775 edge e;
6776
6777 /* This pass needs data flow information. */
6778 df_analyze ();
6779
6780 /* Iterate over the function's return instructions and insert any
6781 register zeroing required by the -fzero-call-used-regs command-line
6782 option or the "zero_call_used_regs" function attribute. */
6783 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6784 {
6785 rtx_insn *insn = BB_END (e->src);
6786 if (JUMP_P (insn) && ANY_RETURN_P (JUMP_LABEL (insn)))
6787 gen_call_used_regs_seq (insn, zero_regs_type);
6788 }
6789
6790 return 0;
6791}
6792
6793} // anon namespace
6794
6795rtl_opt_pass *
6796make_pass_zero_call_used_regs (gcc::context *ctxt)
6797{
6798 return new pass_zero_call_used_regs (ctxt);
6799}
6800
6801/* If CONSTRAINT is a matching constraint, then return its number.
6802 Otherwise, return -1. */
6803
6804static int
6805matching_constraint_num (const char *constraint)
6806{
6807 if (*constraint == '%')
6808 constraint++;
6809
6810 if (IN_RANGE (*constraint, '0', '9'))
6811 return strtoul (constraint, NULL, 10);
6812
6813 return -1;
6814}
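/* For example, matching_constraint_num ("0") returns 0 and
   matching_constraint_num ("%1") returns 1, while a non-matching
   constraint such as "r" or "m" yields -1.  */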
6815
6816/* This mini-pass fixes fall-out from SSA in asm statements that have
6817 in-out constraints. Say you start with
6818
6819 orig = inout;
6820 asm ("": "+mr" (inout));
6821 use (orig);
6822
6823 which is transformed very early to use explicit output and match operands:
6824
6825 orig = inout;
6826 asm ("": "=mr" (inout) : "0" (inout));
6827 use (orig);
6828
6829 Or, after SSA and copyprop,
6830
6831 asm ("": "=mr" (inout_2) : "0" (inout_1));
6832 use (inout_1);
6833
6834 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6835 they represent two separate values, so they will get different pseudo
6836 registers during expansion. Then, since the two operands need to match
6837 per the constraints, but use different pseudo registers, reload can
6838 only register a reload for these operands. But reloads can only be
6839 satisfied by hardregs, not by memory, so we need a register for this
6840 reload, just because we are presented with non-matching operands.
6841 So, even though we allow memory for this operand, no memory can be
6842 used for it, just because the two operands don't match. This can
6843 cause reload failures on register-starved targets.
6844
6845 So it's a symptom of reload not being able to use memory for reloads
6846 or, alternatively it's also a symptom of both operands not coming into
6847 reload as matching (in which case the pseudo could go to memory just
6848 fine, as the alternative allows it, and no reload would be necessary).
6849 We fix the latter problem here, by transforming
6850
6851 asm ("": "=mr" (inout_2) : "0" (inout_1));
6852
6853 back to
6854
6855 inout_2 = inout_1;
6856 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6857
6858static void
6859match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6860{
6861 int i;
6862 bool changed = false;
6863 rtx op = SET_SRC (p_sets[0]);
6864 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6865 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6866 bool *output_matched = XALLOCAVEC (bool, noutputs);
6867
6868 memset (output_matched, 0, noutputs * sizeof (bool));
6869 for (i = 0; i < ninputs; i++)
6870 {
6871 rtx input, output;
6872 rtx_insn *insns;
6873 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6874 int match, j;
6875
6876 match = matching_constraint_num (constraint);
6877 if (match < 0)
6878 continue;
6879
6880 gcc_assert (match < noutputs);
6881 output = SET_DEST (p_sets[match]);
6882 input = RTVEC_ELT (inputs, i);
6883 /* Only do the transformation for pseudos. */
6884 if (! REG_P (output)
6885 || rtx_equal_p (output, input)
6886 || !(REG_P (input) || SUBREG_P (input)
6887 || MEM_P (input) || CONSTANT_P (input))
6888 || !general_operand (input, GET_MODE (output)))
6889 continue;
6890
6891 /* We can't do anything if the output is also used as input,
6892 as we're going to overwrite it. */
6893 for (j = 0; j < ninputs; j++)
6894 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6895 break;
6896 if (j != ninputs)
6897 continue;
6898
6899 /* Avoid changing the same input several times. For
6900 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6901 only change it once (to out1), rather than changing it
6902 first to out1 and afterwards to out2. */
6903 if (i > 0)
6904 {
6905 for (j = 0; j < noutputs; j++)
6906 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6907 break;
6908 if (j != noutputs)
6909 continue;
6910 }
6911 output_matched[match] = true;
6912
6913 start_sequence ();
6914 emit_move_insn (output, copy_rtx (input));
6915 insns = get_insns ();
6916 end_sequence ();
6917 emit_insn_before (insns, insn);
6918
6919 constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
6920 bool early_clobber_p = strchr (constraint, '&') != NULL;
6921
6922 /* Now replace all mentions of the input with output. We can't
6923 just replace the occurrence in inputs[i], as the register might
6924 also be used in some other input (or even in an address of an
6925 output), which would mean possibly increasing the number of
6926 inputs by one (namely 'output' in addition), which might pose
6927 a too complicated problem for reload to solve. E.g. this situation:
6928
6929 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6930
6931 Here 'input' is used in two occurrences as input (once for the
6932 input operand, once for the address in the second output operand).
6933 If we would replace only the occurrence of the input operand (to
6934 make the matching) we would be left with this:
6935
6936 output = input
6937 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6938
6939 Now we suddenly have two different input values (containing the same
6940 value, but different pseudos) where we formerly had only one.
6941 With more complicated asms this might lead to reload failures
6942 which wouldn't have happen without this pass. So, iterate over
6943 all operands and replace all occurrences of the register used.
6944
6945 However, if one or more of the 'input' uses have a non-matching
6946 constraint and the matched output operand is an early clobber
6947 operand, then do not replace the input operand, since by definition
6948 it conflicts with the output operand and cannot share the same
6949 register. See PR89313 for details. */
6950
6951 for (j = 0; j < noutputs; j++)
6952 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6953 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6954 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6955 input, output);
6956 for (j = 0; j < ninputs; j++)
6957 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6958 {
6959 if (!early_clobber_p
6960 || match == matching_constraint_num
6961 (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
6962 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6963 input, output);
6964 }
6965
6966 changed = true;
6967 }
6968
6969 if (changed)
6970 df_insn_rescan (insn);
6971}

/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (VAR_P (d));
  vec_safe_push (fun->local_decls, d);
}

namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_match_asm_constraints

unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

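  /* Defer DF rescans: match_asm_constraints_1 queues a rescan for every
     insn it changes, and the deferred rescans are processed when the pass
     finishes (TODO_df_finish).  */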
  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

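          /* Locate the SET(s) of the insn: the whole pattern for a single
             SET, or the elements of a PARALLEL.  An asm with several
             outputs expands to a PARALLEL whose leading elements are the
             output SETs, followed by any clobbers.  */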
          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}

} // anon namespace
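
/* Create an instance of the asmcons pass; the pass manager builds the
   pass pipeline by calling these make_pass_* factory functions (see
   passes.def and tree-pass.h).  */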
rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}


#include "gt-function.h"