/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple-expr.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "rtl-error.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "optabs-tree.h"
#include "output.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "toplev.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "attribs.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
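
/* Illustrative sketch (not part of the compiler): with ALIGN == 8 the
   two macros bracket a value between adjacent 8-byte boundaries, which
   matters when VALUE is a negative frame offset:

     FLOOR_ROUND (-13, 8)  ==> -16
     CEIL_ROUND  (-13, 8)  ==>  -8
     FLOOR_ROUND ( 13, 8)  ==>   8
     CEIL_ROUND  ( 13, 8)  ==>  16  */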

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;


hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);


/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
	      /* Leave room for the fixed part of the frame.  */
	      - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
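
/* Illustrative sketch (not part of the compiler): on a 32-bit target
   with 4-byte words, the check above rejects any frame whose local
   area exceeds

     (HOST_WIDE_INT_1U << 31) - 64 * 4  ==  2147483392 bytes,

   leaving 64 words of headroom for the fixed part of the frame.  */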

/* Return the minimum spill slot alignment for a register of mode MODE.  */

unsigned int
spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
{
  return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = targetm.starting_frame_offset () % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
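
/* Illustrative sketch (not part of the compiler): on a downward-growing
   frame with a zero frame_phase, a call in the interior of the frame
   such as

     try_fit_stack_local (-32, 16, 8, 8, &off)

   computes FLOOR_ROUND (-32 + 16 - 8, 8) == -24; the slot occupies
   [-24, -16), fits inside the [-32, -16) area, and off == -24 is
   stored, leaving the 8 bytes at [-32, -24) free.  */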

/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
			   &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + targetm.starting_frame_offset (), Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrapper around assign_stack_local_1 with the KIND argument fixed to
   ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
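
/* Illustrative sketch (not part of the compiler): a caller wanting a
   mode-aligned 8-byte scratch slot, and a byte-aligned BLKmode buffer,
   might write

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);
     rtx buf  = assign_stack_local (BLKmode, len, BITS_PER_UNIT);

   where `len' is a hypothetical HOST_WIDE_INT byte count known at
   expand time.  */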

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */
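
/* Illustrative sketch (not part of the compiler) of how the nesting
   level is driven by push_temp_slots, preserve_temp_slots and
   pop_temp_slots, all defined further below:

     push_temp_slots ();                     enter level N + 1
     rtx t = assign_stack_temp (DImode, 8);
     ...
     preserve_temp_slots (t);                demote T to level N
     pop_temp_slots ();                      frees level N + 1; T survives  */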

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
			    temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  {
	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	      return p;
	  }
    }

  return NULL;
}
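
/* Illustrative sketch (not part of the compiler): the last-resort case
   above catches a not-yet-instantiated address such as

     (plus (reg virtual-stack-vars) (const_int -24))

   by scanning every level for a slot whose [base_offset,
   base_offset + full_size) range contains -24.  */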

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
			    tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc<temp_slot> ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      vec_safe_push (stack_slot_list, p->slot);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first two arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
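
/* Illustrative sketch (not part of the compiler): expansion code that
   needs scratch memory for an SImode value for the duration of one
   statement typically writes

     rtx mem = assign_stack_temp (SImode, GET_MODE_SIZE (SImode));

   and relies on the statement-level free_temp_slots call to recycle
   the slot afterwards.  */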

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
   whose name should therefore be used in error messages.  In either case,
   we allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
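
/* Illustrative sketch (not part of the compiler): expanding a call that
   returns a struct in memory might obtain its scratch area with

     rtx target = assign_temp (TREE_TYPE (exp), 1, 1);

   forcing stack memory (MEMORY_REQUIRED == 1) and suppressing any
   PROMOTE_MODE widening.  */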

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
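
/* Illustrative sketch (not part of the compiler): two free BLKmode
   slots with

     p->base_offset == -32, p->full_size == 16
     q->base_offset == -16, q->full_size == 16

   satisfy p->base_offset + p->full_size == q->base_offset, so Q is
   merged into P, leaving one 32-byte slot at offset -32.  */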

/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}

/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};
/* ??? This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};

/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
				    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
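
/* Illustrative sketch (not part of the compiler): a back end that wants
   the function's incoming return address can request a pseudo holding
   the link register's value at entry, e.g.

     rtx ra = get_hard_reg_initial_val (Pmode, LR_REGNUM);

   where LR_REGNUM stands for some target's link-register number; the
   copy insn itself is emitted later by emit_initial_value_sets.  */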

/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
	  && REGNO (ivs->entries[i].hard_reg) == regno)
	return ivs->entries[i].pseudo;

  return NULL_RTX;
}

unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}

/* Store the hard reg/pseudo reg initial values pair entry I in *HREG and
   *PREG, and return TRUE if I is a valid entry, or FALSE otherwise.  */
bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}

/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but is
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif


/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
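
/* Illustrative sketch (not part of the compiler): with var_offset == 16,
   a reference such as

     (plus (reg virtual-stack-vars) (const_int 8))

   is rewritten by the routines below into

     (plus (reg frame-pointer) (const_int 24)).  */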

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
	{
	  rtx new_rtx;
	  HOST_WIDE_INT offset;
	  switch (GET_CODE (x))
	    {
	    case REG:
	      new_rtx = instantiate_new_reg (x, &offset);
	      if (new_rtx)
		{
		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
		  changed = true;
		}
	      iter.skip_subrtxes ();
	      break;

	    case PLUS:
	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
	      if (new_rtx)
		{
		  XEXP (x, 0) = new_rtx;
		  *loc = plus_constant (GET_MODE (x), x, offset, true);
		  changed = true;
		  iter.skip_subrtxes ();
		  break;
		}

	      /* FIXME -- from old code */
	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
		 we can commute the PLUS and SUBREG because pointers into the
		 frame are well-behaved.  */
	      break;

	    default:
	      break;
	    }
	}
    }
  return changed;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   gen_int_mode (-offset, GET_MODE (new_rtx)));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
				   gen_int_mode (offset,
						 GET_MODE (SET_DEST (set))),
				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);

	    if (!instantiate_virtual_regs_in_rtx (&addr))
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr, true);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offset by a constant but the base reg is no longer
	       the virtual-stack-reg.  Below we would do a force_reg on the
	       whole operand, but this insn might actually only accept memory.
	       Hence, before doing that last resort, try to reload the address
	       into a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr, true);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
1696 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1697 gen_int_mode (offset, GET_MODE (x)),
1698 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1699 seq = get_insns ();
1700 end_sequence ();
1701 emit_insn_before (seq, insn);
1702 }
1703 break;
1704
1705 case SUBREG:
1706 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1707 if (new_rtx == NULL)
1708 continue;
1709 if (offset != 0)
1710 {
1711 start_sequence ();
1712 new_rtx = expand_simple_binop
1713 (GET_MODE (new_rtx), PLUS, new_rtx,
1714 gen_int_mode (offset, GET_MODE (new_rtx)),
1715 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1716 seq = get_insns ();
1717 end_sequence ();
1718 emit_insn_before (seq, insn);
1719 }
1720 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1721 GET_MODE (new_rtx), SUBREG_BYTE (x));
1722 gcc_assert (x);
1723 break;
1724
1725 default:
1726 continue;
1727 }
1728
1729 /* At this point, X contains the new value for the operand.
1730 Validate the new value vs the insn predicate. Note that
1731 asm insns will have insn_code -1 here. */
1732 if (!safe_insn_predicate (insn_code, i, x))
1733 {
1734 start_sequence ();
1735 if (REG_P (x))
1736 {
1737 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1738 x = copy_to_reg (x);
1739 }
1740 else
1741 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1742 seq = get_insns ();
1743 end_sequence ();
1744 if (seq)
1745 emit_insn_before (seq, insn);
1746 }
1747
1748 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1749 any_change = true;
1750 }
1751
1752 if (any_change)
1753 {
1754 /* Propagate operand changes into the duplicates. */
1755 for (i = 0; i < recog_data.n_dups; ++i)
1756 *recog_data.dup_loc[i]
1757 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1758
1759 /* Force re-recognition of the instruction for validation. */
1760 INSN_CODE (insn) = -1;
1761 }
1762
1763 if (asm_noperands (PATTERN (insn)) >= 0)
1764 {
1765 if (!check_asm_operands (PATTERN (insn)))
1766 {
1767 error_for_asm (insn, "impossible constraint in %<asm%>");
1768 /* For asm goto, instead of fixing up all the edges
1769 just clear the template and clear input operands
1770 (asm goto doesn't have any output operands). */
1771 if (JUMP_P (insn))
1772 {
1773 rtx asm_op = extract_asm_operands (PATTERN (insn));
1774 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1775 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1776 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1777 }
1778 else
1779 delete_insn (insn);
1780 }
1781 }
1782 else
1783 {
1784 if (recog_memoized (insn) < 0)
1785 fatal_insn_not_found (insn);
1786 }
1787}
1788
1789/* Subroutine of instantiate_decls. Given RTL representing a decl,
1790 do any instantiation required. */
1791
1792void
1793instantiate_decl_rtl (rtx x)
1794{
1795 rtx addr;
1796
1797 if (x == 0)
1798 return;
1799
1800 /* If this is a CONCAT, recurse for the pieces. */
1801 if (GET_CODE (x) == CONCAT)
1802 {
1803 instantiate_decl_rtl (XEXP (x, 0));
1804 instantiate_decl_rtl (XEXP (x, 1));
1805 return;
1806 }
1807
1808 /* If this is not a MEM, no need to do anything. Similarly if the
1809 address is a constant or a register that is not a virtual register. */
1810 if (!MEM_P (x))
1811 return;
1812
1813 addr = XEXP (x, 0);
1814 if (CONSTANT_P (addr)
1815 || (REG_P (addr)
1816 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1817 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1818 return;
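
  /* Otherwise the address may mention a virtual register, e.g.
     (mem (plus (reg virtual-stack-vars) (const_int -8))); rewrite it
     in terms of the chosen hard register and a folded offset.  */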
1819
1820 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1821}
1822
1823/* Helper for instantiate_decls called via walk_tree: Process all decls
1824 in the given DECL_VALUE_EXPR. */
1825
1826static tree
1827instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1828{
1829 tree t = *tp;
1830 if (! EXPR_P (t))
1831 {
1832 *walk_subtrees = 0;
1833 if (DECL_P (t))
1834 {
1835 if (DECL_RTL_SET_P (t))
1836 instantiate_decl_rtl (DECL_RTL (t));
1837 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1838 && DECL_INCOMING_RTL (t))
1839 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1840 if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1841 && DECL_HAS_VALUE_EXPR_P (t))
1842 {
1843 tree v = DECL_VALUE_EXPR (t);
1844 walk_tree (&v, instantiate_expr, NULL, NULL);
1845 }
1846 }
1847 }
1848 return NULL;
1849}
1850
1851/* Subroutine of instantiate_decls: Process all decls in the given
1852 BLOCK node and all its subblocks. */
1853
1854static void
1855instantiate_decls_1 (tree let)
1856{
1857 tree t;
1858
1859 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1860 {
1861 if (DECL_RTL_SET_P (t))
1862 instantiate_decl_rtl (DECL_RTL (t));
1863 if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1864 {
1865 tree v = DECL_VALUE_EXPR (t);
1866 walk_tree (&v, instantiate_expr, NULL, NULL);
1867 }
1868 }
1869
1870 /* Process all subblocks. */
1871 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1872 instantiate_decls_1 (t);
1873}
1874
1875/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1876 all virtual registers in their DECL_RTL's. */
1877
1878static void
1879instantiate_decls (tree fndecl)
1880{
1881 tree decl;
1882 unsigned ix;
1883
1884 /* Process all parameters of the function. */
1885 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1886 {
1887 instantiate_decl_rtl (DECL_RTL (decl));
1888 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1889 if (DECL_HAS_VALUE_EXPR_P (decl))
1890 {
1891 tree v = DECL_VALUE_EXPR (decl);
1892 walk_tree (&v, instantiate_expr, NULL, NULL);
1893 }
1894 }
1895
1896 if ((decl = DECL_RESULT (fndecl))
1897 && TREE_CODE (decl) == RESULT_DECL)
1898 {
1899 if (DECL_RTL_SET_P (decl))
1900 instantiate_decl_rtl (DECL_RTL (decl));
1901 if (DECL_HAS_VALUE_EXPR_P (decl))
1902 {
1903 tree v = DECL_VALUE_EXPR (decl);
1904 walk_tree (&v, instantiate_expr, NULL, NULL);
1905 }
1906 }
1907
1908 /* Process the saved static chain if it exists. */
1909 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1910 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1911 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1912
1913 /* Now process all variables defined in the function or its subblocks. */
1914 if (DECL_INITIAL (fndecl))
1915 instantiate_decls_1 (DECL_INITIAL (fndecl));
1916
1917 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1918 if (DECL_RTL_SET_P (decl))
1919 instantiate_decl_rtl (DECL_RTL (decl));
1920 vec_free (cfun->local_decls);
1921}
1922
1923/* Pass through the INSNS of function FNDECL and convert virtual register
1924 references to hard register references. */
1925
1926static unsigned int
1927instantiate_virtual_regs (void)
1928{
1929 rtx_insn *insn;
1930
1931 /* Compute the offsets to use for this function. */
1932 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1933 var_offset = targetm.starting_frame_offset ();
1934 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1935 out_arg_offset = STACK_POINTER_OFFSET;
1936#ifdef FRAME_POINTER_CFA_OFFSET
1937 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1938#else
1939 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1940#endif
1941
1942 /* Initialize recognition, indicating that volatile is OK. */
1943 init_recog ();
1944
1945 /* Scan through all the insns, instantiating every virtual register still
1946 present. */
1947 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1948 if (INSN_P (insn))
1949 {
1950 /* These patterns in the instruction stream can never be recognized.
1951 Fortunately, they shouldn't contain virtual registers either. */
1952 if (GET_CODE (PATTERN (insn)) == USE
1953 || GET_CODE (PATTERN (insn)) == CLOBBER
1954 || GET_CODE (PATTERN (insn)) == ASM_INPUT
1955 || DEBUG_MARKER_INSN_P (insn))
1956 continue;
1957 else if (DEBUG_BIND_INSN_P (insn))
1958 instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1959 else
1960 instantiate_virtual_regs_in_insn (insn);
1961
1962 if (insn->deleted ())
1963 continue;
1964
1965 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1966
1967 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1968 if (CALL_P (insn))
1969 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1970 }
1971
1972 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1973 instantiate_decls (current_function_decl);
1974
1975 targetm.instantiate_decls ();
1976
1977 /* Indicate that, from now on, assign_stack_local should use
1978 frame_pointer_rtx. */
1979 virtuals_instantiated = 1;
1980
1981 return 0;
1982}
1983
1984namespace {
1985
1986const pass_data pass_data_instantiate_virtual_regs =
1987{
1988 RTL_PASS, /* type */
1989 "vregs", /* name */
1990 OPTGROUP_NONE, /* optinfo_flags */
1991 TV_NONE, /* tv_id */
1992 0, /* properties_required */
1993 0, /* properties_provided */
1994 0, /* properties_destroyed */
1995 0, /* todo_flags_start */
1996 0, /* todo_flags_finish */
1997};
1998
1999class pass_instantiate_virtual_regs : public rtl_opt_pass
2000{
2001public:
2002 pass_instantiate_virtual_regs (gcc::context *ctxt)
2003 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2004 {}
2005
2006 /* opt_pass methods: */
2007 virtual unsigned int execute (function *)
2008 {
2009 return instantiate_virtual_regs ();
2010 }
2011
2012}; // class pass_instantiate_virtual_regs
2013
2014} // anon namespace
2015
2016rtl_opt_pass *
2017make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2018{
2019 return new pass_instantiate_virtual_regs (ctxt);
2020}
2021
2022
2023/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2024 This means a type for which function calls must pass an address to the
2025 function or get an address back from the function.
2026 EXP may be a type node or an expression (whose type is tested). */
2027
2028int
2029aggregate_value_p (const_tree exp, const_tree fntype)
2030{
2031 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2032 int i, regno, nregs;
2033 rtx reg;
2034
2035 if (fntype)
2036 switch (TREE_CODE (fntype))
2037 {
2038 case CALL_EXPR:
2039 {
2040 tree fndecl = get_callee_fndecl (fntype);
2041 if (fndecl)
2042 fntype = TREE_TYPE (fndecl);
2043 else if (CALL_EXPR_FN (fntype))
2044 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2045 else
2046 /* For internal functions, assume nothing needs to be
2047 returned in memory. */
2048 return 0;
2049 }
2050 break;
2051 case FUNCTION_DECL:
2052 fntype = TREE_TYPE (fntype);
2053 break;
2054 case FUNCTION_TYPE:
2055 case METHOD_TYPE:
2056 break;
2057 case IDENTIFIER_NODE:
2058 fntype = NULL_TREE;
2059 break;
2060 default:
2061 /* We don't expect other tree types here. */
2062 gcc_unreachable ();
2063 }
2064
2065 if (VOID_TYPE_P (type))
2066 return 0;
2067
  /* If a record should be passed the same as its first (and only) member,
     don't pass it as an aggregate.  */
2070 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2071 return aggregate_value_p (first_field (type), fntype);
2072
2073 /* If the front end has decided that this needs to be passed by
2074 reference, do so. */
2075 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2076 && DECL_BY_REFERENCE (exp))
2077 return 1;
2078
2079 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2080 if (fntype && TREE_ADDRESSABLE (fntype))
2081 return 1;
2082
2083 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2084 and thus can't be returned in registers. */
2085 if (TREE_ADDRESSABLE (type))
2086 return 1;
2087
2088 if (TYPE_EMPTY_P (type))
2089 return 0;
2090
2091 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2092 return 1;
2093
2094 if (targetm.calls.return_in_memory (type, fntype))
2095 return 1;
2096
2097 /* Make sure we have suitable call-clobbered regs to return
2098 the value in; if not, we must return it in memory. */
2099 reg = hard_function_value (type, 0, fntype, 0);
2100
2101 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2102 it is OK. */
2103 if (!REG_P (reg))
2104 return 0;
2105
2106 regno = REGNO (reg);
2107 nregs = hard_regno_nregs (regno, TYPE_MODE (type));
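
  /* E.g. a value spanning two hard registers can be returned in
     registers only if both are call-clobbered; if either one is
     call-saved on the target, fall back to returning in memory.  */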
2108 for (i = 0; i < nregs; i++)
2109 if (! call_used_regs[regno + i])
2110 return 1;
2111
2112 return 0;
2113}
2114
2115/* Return true if we should assign DECL a pseudo register; false if it
2116 should live on the local stack. */
2117
2118bool
2119use_register_for_decl (const_tree decl)
2120{
2121 if (TREE_CODE (decl) == SSA_NAME)
2122 {
2123 /* We often try to use the SSA_NAME, instead of its underlying
2124 decl, to get type information and guide decisions, to avoid
2125 differences of behavior between anonymous and named
2126 variables, but in this one case we have to go for the actual
2127 variable if there is one. The main reason is that, at least
2128 at -O0, we want to place user variables on the stack, but we
2129 don't mind using pseudos for anonymous or ignored temps.
2130 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2131 should go in pseudos, whereas their corresponding variables
2132 might have to go on the stack. So, disregarding the decl
2133 here would negatively impact debug info at -O0, enable
2134 coalescing between SSA_NAMEs that ought to get different
2135 stack/pseudo assignments, and get the incoming argument
2136 processing thoroughly confused by PARM_DECLs expected to live
2137 in stack slots but assigned to pseudos. */
2138 if (!SSA_NAME_VAR (decl))
2139 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2140 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2141
2142 decl = SSA_NAME_VAR (decl);
2143 }
2144
2145 /* Honor volatile. */
2146 if (TREE_SIDE_EFFECTS (decl))
2147 return false;
2148
2149 /* Honor addressability. */
2150 if (TREE_ADDRESSABLE (decl))
2151 return false;
2152
2153 /* RESULT_DECLs are a bit special in that they're assigned without
2154 regard to use_register_for_decl, but we generally only store in
2155 them. If we coalesce their SSA NAMEs, we'd better return a
2156 result that matches the assignment in expand_function_start. */
2157 if (TREE_CODE (decl) == RESULT_DECL)
2158 {
2159 /* If it's not an aggregate, we're going to use a REG or a
2160 PARALLEL containing a REG. */
2161 if (!aggregate_value_p (decl, current_function_decl))
2162 return true;
2163
2164 /* If expand_function_start determines the return value, we'll
2165 use MEM if it's not by reference. */
2166 if (cfun->returns_pcc_struct
2167 || (targetm.calls.struct_value_rtx
2168 (TREE_TYPE (current_function_decl), 1)))
2169 return DECL_BY_REFERENCE (decl);
2170
2171 /* Otherwise, we're taking an extra all.function_result_decl
2172 argument. It's set up in assign_parms_augmented_arg_list,
2173 under the (negated) conditions above, and then it's used to
     set up the RESULT_DECL rtl in assign_parms, after looping
2175 over all parameters. Now, if the RESULT_DECL is not by
2176 reference, we'll use a MEM either way. */
2177 if (!DECL_BY_REFERENCE (decl))
2178 return false;
2179
2180 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2181 the function_result_decl's assignment. Since it's a pointer,
2182 we can short-circuit a number of the tests below, and we must
     duplicate them because we don't have the
2184 function_result_decl to test. */
2185 if (!targetm.calls.allocate_stack_slots_for_args ())
2186 return true;
2187 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2188 if (optimize)
2189 return true;
2190 /* We don't set DECL_REGISTER for the function_result_decl. */
2191 return false;
2192 }
2193
  /* Decl is implicitly addressable by bound stores and loads
2195 if it is an aggregate holding bounds. */
2196 if (chkp_function_instrumented_p (current_function_decl)
2197 && TREE_TYPE (decl)
2198 && !BOUNDED_P (decl)
2199 && chkp_type_has_pointer (TREE_TYPE (decl)))
2200 return false;
2201
2202 /* Only register-like things go in registers. */
2203 if (DECL_MODE (decl) == BLKmode)
2204 return false;
2205
2206 /* If -ffloat-store specified, don't put explicit float variables
2207 into registers. */
2208 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2209 propagates values across these stores, and it probably shouldn't. */
2210 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2211 return false;
2212
2213 if (!targetm.calls.allocate_stack_slots_for_args ())
2214 return true;
2215
2216 /* If we're not interested in tracking debugging information for
2217 this decl, then we can certainly put it in a register. */
2218 if (DECL_IGNORED_P (decl))
2219 return true;
2220
2221 if (optimize)
2222 return true;
2223
2224 if (!DECL_REGISTER (decl))
2225 return false;
2226
2227 /* When not optimizing, disregard register keyword for types that
2228 could have methods, otherwise the methods won't be callable from
2229 the debugger. */
2230 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2231 return false;
2232
2233 return true;
2234}
2235
2236/* Structures to communicate between the subroutines of assign_parms.
2237 The first holds data persistent across all parameters, the second
2238 is cleared out for each parameter. */
2239
2240struct assign_parm_data_all
2241{
2242 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2243 should become a job of the target or otherwise encapsulated. */
2244 CUMULATIVE_ARGS args_so_far_v;
2245 cumulative_args_t args_so_far;
2246 struct args_size stack_args_size;
2247 tree function_result_decl;
2248 tree orig_fnargs;
2249 rtx_insn *first_conversion_insn;
2250 rtx_insn *last_conversion_insn;
2251 HOST_WIDE_INT pretend_args_size;
2252 HOST_WIDE_INT extra_pretend_bytes;
2253 int reg_parm_stack_space;
2254};
2255
2256struct assign_parm_data_one
2257{
2258 tree nominal_type;
2259 tree passed_type;
2260 rtx entry_parm;
2261 rtx stack_parm;
2262 machine_mode nominal_mode;
2263 machine_mode passed_mode;
2264 machine_mode promoted_mode;
2265 struct locate_and_pad_arg_data locate;
2266 int partial;
2267 BOOL_BITFIELD named_arg : 1;
2268 BOOL_BITFIELD passed_pointer : 1;
2269 BOOL_BITFIELD on_stack : 1;
2270 BOOL_BITFIELD loaded_in_reg : 1;
2271};
2272
2273struct bounds_parm_data
2274{
2275 assign_parm_data_one parm_data;
2276 tree bounds_parm;
2277 tree ptr_parm;
2278 rtx ptr_entry;
2279 int bound_no;
2280};
2281
2282/* A subroutine of assign_parms. Initialize ALL. */
2283
2284static void
2285assign_parms_initialize_all (struct assign_parm_data_all *all)
2286{
2287 tree fntype ATTRIBUTE_UNUSED;
2288
2289 memset (all, 0, sizeof (*all));
2290
2291 fntype = TREE_TYPE (current_function_decl);
2292
2293#ifdef INIT_CUMULATIVE_INCOMING_ARGS
2294 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2295#else
2296 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2297 current_function_decl, -1);
2298#endif
2299 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2300
2301#ifdef INCOMING_REG_PARM_STACK_SPACE
2302 all->reg_parm_stack_space
2303 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2304#endif
2305}
2306
/* If ARGS contains entries with complex types, split each such entry
   into two entries of the component type, updating ARGS in place.  */
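/* For example, on a target whose ABI splits complex arguments, a
   parameter of type _Complex double is rewritten as two double
   parameters holding the real and imaginary parts.  */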
2310
2311static void
2312split_complex_args (vec<tree> *args)
2313{
2314 unsigned i;
2315 tree p;
2316
2317 FOR_EACH_VEC_ELT (*args, i, p)
2318 {
2319 tree type = TREE_TYPE (p);
2320 if (TREE_CODE (type) == COMPLEX_TYPE
2321 && targetm.calls.split_complex_arg (type))
2322 {
2323 tree decl;
2324 tree subtype = TREE_TYPE (type);
2325 bool addressable = TREE_ADDRESSABLE (p);
2326
2327 /* Rewrite the PARM_DECL's type with its component. */
2328 p = copy_node (p);
2329 TREE_TYPE (p) = subtype;
2330 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2331 SET_DECL_MODE (p, VOIDmode);
2332 DECL_SIZE (p) = NULL;
2333 DECL_SIZE_UNIT (p) = NULL;
2334 /* If this arg must go in memory, put it in a pseudo here.
2335 We can't allow it to go in memory as per normal parms,
2336 because the usual place might not have the imag part
2337 adjacent to the real part. */
2338 DECL_ARTIFICIAL (p) = addressable;
2339 DECL_IGNORED_P (p) = addressable;
2340 TREE_ADDRESSABLE (p) = 0;
2341 layout_decl (p, 0);
2342 (*args)[i] = p;
2343
2344 /* Build a second synthetic decl. */
2345 decl = build_decl (EXPR_LOCATION (p),
2346 PARM_DECL, NULL_TREE, subtype);
2347 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2348 DECL_ARTIFICIAL (decl) = addressable;
2349 DECL_IGNORED_P (decl) = addressable;
2350 layout_decl (decl, 0);
2351 args->safe_insert (++i, decl);
2352 }
2353 }
2354}
2355
2356/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2357 the hidden struct return argument, and (abi willing) complex args.
2358 Return the new parameter list. */
2359
2360static vec<tree>
2361assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2362{
2363 tree fndecl = current_function_decl;
2364 tree fntype = TREE_TYPE (fndecl);
2365 vec<tree> fnargs = vNULL;
2366 tree arg;
2367
2368 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2369 fnargs.safe_push (arg);
2370
2371 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2372
2373 /* If struct value address is treated as the first argument, make it so. */
2374 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2375 && ! cfun->returns_pcc_struct
2376 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2377 {
2378 tree type = build_pointer_type (TREE_TYPE (fntype));
2379 tree decl;
2380
2381 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2382 PARM_DECL, get_identifier (".result_ptr"), type);
2383 DECL_ARG_TYPE (decl) = type;
2384 DECL_ARTIFICIAL (decl) = 1;
2385 DECL_NAMELESS (decl) = 1;
2386 TREE_CONSTANT (decl) = 1;
2387 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2388 changes, the end of the RESULT_DECL handling block in
2389 use_register_for_decl must be adjusted to match. */
2390
2391 DECL_CHAIN (decl) = all->orig_fnargs;
2392 all->orig_fnargs = decl;
2393 fnargs.safe_insert (0, decl);
2394
2395 all->function_result_decl = decl;
2396
      /* If the function is instrumented, the bounds of the passed
	 structure address are passed as the second argument.  */
2399 if (chkp_function_instrumented_p (fndecl))
2400 {
2401 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2402 PARM_DECL, get_identifier (".result_bnd"),
2403 pointer_bounds_type_node);
2404 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2405 DECL_ARTIFICIAL (decl) = 1;
2406 DECL_NAMELESS (decl) = 1;
2407 TREE_CONSTANT (decl) = 1;
2408
2409 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2410 DECL_CHAIN (all->orig_fnargs) = decl;
2411 fnargs.safe_insert (1, decl);
2412 }
2413 }
2414
2415 /* If the target wants to split complex arguments into scalars, do so. */
2416 if (targetm.calls.split_complex_arg)
2417 split_complex_args (&fnargs);
2418
2419 return fnargs;
2420}
2421
2422/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2423 data for the parameter. Incorporate ABI specifics such as pass-by-
2424 reference and type promotion. */
2425
2426static void
2427assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2428 struct assign_parm_data_one *data)
2429{
2430 tree nominal_type, passed_type;
2431 machine_mode nominal_mode, passed_mode, promoted_mode;
2432 int unsignedp;
2433
2434 memset (data, 0, sizeof (*data));
2435
2436 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2437 if (!cfun->stdarg)
2438 data->named_arg = 1; /* No variadic parms. */
2439 else if (DECL_CHAIN (parm))
2440 data->named_arg = 1; /* Not the last non-variadic parm. */
2441 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2442 data->named_arg = 1; /* Only variadic ones are unnamed. */
2443 else
2444 data->named_arg = 0; /* Treat as variadic. */
2445
2446 nominal_type = TREE_TYPE (parm);
2447 passed_type = DECL_ARG_TYPE (parm);
2448
2449 /* Look out for errors propagating this far. Also, if the parameter's
2450 type is void then its value doesn't matter. */
2451 if (TREE_TYPE (parm) == error_mark_node
2452 /* This can happen after weird syntax errors
2453 or if an enum type is defined among the parms. */
2454 || TREE_CODE (parm) != PARM_DECL
2455 || passed_type == NULL
2456 || VOID_TYPE_P (nominal_type))
2457 {
2458 nominal_type = passed_type = void_type_node;
2459 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2460 goto egress;
2461 }
2462
2463 /* Find mode of arg as it is passed, and mode of arg as it should be
2464 during execution of this function. */
2465 passed_mode = TYPE_MODE (passed_type);
2466 nominal_mode = TYPE_MODE (nominal_type);
2467
2468 /* If the parm is to be passed as a transparent union or record, use the
2469 type of the first field for the tests below. We have already verified
2470 that the modes are the same. */
2471 if ((TREE_CODE (passed_type) == UNION_TYPE
2472 || TREE_CODE (passed_type) == RECORD_TYPE)
2473 && TYPE_TRANSPARENT_AGGR (passed_type))
2474 passed_type = TREE_TYPE (first_field (passed_type));
2475
2476 /* See if this arg was passed by invisible reference. */
2477 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2478 passed_type, data->named_arg))
2479 {
2480 passed_type = nominal_type = build_pointer_type (passed_type);
2481 data->passed_pointer = true;
2482 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2483 }
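
  /* E.g. a large aggregate that the ABI passes by invisible reference
     is treated from here on as a pointer parameter; the callee sees
     the aggregate through a MEM at that pointer.  */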
2484
2485 /* Find mode as it is passed by the ABI. */
2486 unsignedp = TYPE_UNSIGNED (passed_type);
2487 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2488 TREE_TYPE (current_function_decl), 0);
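  /* E.g. many ABIs promote sub-word integers here, so a char argument
     may arrive sign- or zero-extended in a full word mode even though
     its nominal mode is QImode.  */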
2489
2490 egress:
2491 data->nominal_type = nominal_type;
2492 data->passed_type = passed_type;
2493 data->nominal_mode = nominal_mode;
2494 data->passed_mode = passed_mode;
2495 data->promoted_mode = promoted_mode;
2496}
2497
2498/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2499
2500static void
2501assign_parms_setup_varargs (struct assign_parm_data_all *all,
2502 struct assign_parm_data_one *data, bool no_rtl)
2503{
2504 int varargs_pretend_bytes = 0;
2505
2506 targetm.calls.setup_incoming_varargs (all->args_so_far,
2507 data->promoted_mode,
2508 data->passed_type,
2509 &varargs_pretend_bytes, no_rtl);
2510
2511 /* If the back-end has requested extra stack space, record how much is
2512 needed. Do not change pretend_args_size otherwise since it may be
2513 nonzero from an earlier partial argument. */
2514 if (varargs_pretend_bytes > 0)
2515 all->pretend_args_size = varargs_pretend_bytes;
2516}
2517
2518/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2519 the incoming location of the current parameter. */
2520
2521static void
2522assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2523 struct assign_parm_data_one *data)
2524{
2525 HOST_WIDE_INT pretend_bytes = 0;
2526 rtx entry_parm;
2527 bool in_regs;
2528
2529 if (data->promoted_mode == VOIDmode)
2530 {
2531 data->entry_parm = data->stack_parm = const0_rtx;
2532 return;
2533 }
2534
2535 targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2536 data->passed_type);
2537
2538 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2539 data->promoted_mode,
2540 data->passed_type,
2541 data->named_arg);
2542
2543 if (entry_parm == 0)
2544 data->promoted_mode = data->passed_mode;
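  /* If no register is available, the argument arrives purely on the
     stack, where the ABI promotion computed above does not apply;
     access it in its passed mode.  */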
2545
2546 /* Determine parm's home in the stack, in case it arrives in the stack
2547 or we should pretend it did. Compute the stack position and rtx where
2548 the argument arrives and its size.
2549
2550 There is one complexity here: If this was a parameter that would
2551 have been passed in registers, but wasn't only because it is
2552 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2553 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2554 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2555 as it was the previous time. */
2556 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2557#ifdef STACK_PARMS_IN_REG_PARM_AREA
2558 in_regs = true;
2559#endif
2560 if (!in_regs && !data->named_arg)
2561 {
2562 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2563 {
2564 rtx tem;
2565 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2566 data->promoted_mode,
2567 data->passed_type, true);
2568 in_regs = tem != NULL;
2569 }
2570 }
2571
2572 /* If this parameter was passed both in registers and in the stack, use
2573 the copy on the stack. */
2574 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2575 data->passed_type))
2576 entry_parm = 0;
2577
2578 if (entry_parm)
2579 {
2580 int partial;
2581
2582 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2583 data->promoted_mode,
2584 data->passed_type,
2585 data->named_arg);
2586 data->partial = partial;
2587
2588 /* The caller might already have allocated stack space for the
2589 register parameters. */
2590 if (partial != 0 && all->reg_parm_stack_space == 0)
2591 {
2592 /* Part of this argument is passed in registers and part
2593 is passed on the stack. Ask the prologue code to extend
2594 the stack part so that we can recreate the full value.
2595
2596 PRETEND_BYTES is the size of the registers we need to store.
2597 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2598 stack space that the prologue should allocate.
2599
2600 Internally, gcc assumes that the argument pointer is aligned
2601 to STACK_BOUNDARY bits. This is used both for alignment
2602 optimizations (see init_emit) and to locate arguments that are
2603 aligned to more than PARM_BOUNDARY bits. We must preserve this
2604 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2605 a stack boundary. */
2606
2607 /* We assume at most one partial arg, and it must be the first
2608 argument on the stack. */
2609 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2610
2611 pretend_bytes = partial;
2612 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
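
	  /* For example, if 4 bytes of an argument were passed in
	     registers on a hypothetical target with STACK_BYTES == 8,
	     PRETEND_BYTES is 4 but we ask the prologue for
	     CEIL_ROUND (4, 8) == 8 bytes, keeping the argument
	     pointer aligned to STACK_BOUNDARY.  */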
2613
2614 /* We want to align relative to the actual stack pointer, so
2615 don't include this in the stack size until later. */
2616 all->extra_pretend_bytes = all->pretend_args_size;
2617 }
2618 }
2619
2620 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2621 all->reg_parm_stack_space,
2622 entry_parm ? data->partial : 0, current_function_decl,
2623 &all->stack_args_size, &data->locate);
2624
2625 /* Update parm_stack_boundary if this parameter is passed in the
2626 stack. */
2627 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2628 crtl->parm_stack_boundary = data->locate.boundary;
2629
2630 /* Adjust offsets to include the pretend args. */
2631 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2632 data->locate.slot_offset.constant += pretend_bytes;
2633 data->locate.offset.constant += pretend_bytes;
2634
2635 data->entry_parm = entry_parm;
2636}
2637
2638/* A subroutine of assign_parms. If there is actually space on the stack
2639 for this parm, count it in stack_args_size and return true. */
2640
2641static bool
2642assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2643 struct assign_parm_data_one *data)
2644{
  /* Bounds are never passed on the stack, to keep compatibility
     with non-instrumented code.  */
2647 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2648 return false;
2649 /* Trivially true if we've no incoming register. */
2650 else if (data->entry_parm == NULL)
2651 ;
2652 /* Also true if we're partially in registers and partially not,
2653 since we've arranged to drop the entire argument on the stack. */
2654 else if (data->partial != 0)
2655 ;
2656 /* Also true if the target says that it's passed in both registers
2657 and on the stack. */
2658 else if (GET_CODE (data->entry_parm) == PARALLEL
2659 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2660 ;
2661 /* Also true if the target says that there's stack allocated for
2662 all register parameters. */
2663 else if (all->reg_parm_stack_space > 0)
2664 ;
2665 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2666 else
2667 return false;
2668
2669 all->stack_args_size.constant += data->locate.size.constant;
2670 if (data->locate.size.var)
2671 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2672
2673 return true;
2674}
2675
2676/* A subroutine of assign_parms. Given that this parameter is allocated
2677 stack space by the ABI, find it. */
2678
2679static void
2680assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2681{
2682 rtx offset_rtx, stack_parm;
2683 unsigned int align, boundary;
2684
2685 /* If we're passing this arg using a reg, make its stack home the
2686 aligned stack slot. */
2687 if (data->entry_parm)
2688 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2689 else
2690 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2691
2692 stack_parm = crtl->args.internal_arg_pointer;
2693 if (offset_rtx != const0_rtx)
2694 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2695 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2696
2697 if (!data->passed_pointer)
2698 {
2699 set_mem_attributes (stack_parm, parm, 1);
2700 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2701 while promoted mode's size is needed. */
2702 if (data->promoted_mode != BLKmode
2703 && data->promoted_mode != DECL_MODE (parm))
2704 {
2705 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2706 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2707 {
2708 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2709 data->promoted_mode);
2710 if (offset)
2711 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2712 }
2713 }
2714 }
2715
2716 boundary = data->locate.boundary;
2717 align = BITS_PER_UNIT;
2718
2719 /* If we're padding upward, we know that the alignment of the slot
2720 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2721 intentionally forcing upward padding. Otherwise we have to come
2722 up with a guess at the alignment based on OFFSET_RTX. */
2723 if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
2724 align = boundary;
2725 else if (CONST_INT_P (offset_rtx))
2726 {
2727 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2728 align = least_bit_hwi (align);
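      /* E.g. a byte offset of 4 with a 64-bit BOUNDARY gives
	 least_bit_hwi (32 | 64) == 32, so the slot can only be
	 assumed to be 32-bit aligned.  */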
2729 }
2730 set_mem_align (stack_parm, align);
2731
2732 if (data->entry_parm)
2733 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2734
2735 data->stack_parm = stack_parm;
2736}
2737
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_PARM such that it's
   always valid and contiguous.  */
2740
2741static void
2742assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2743{
2744 rtx entry_parm = data->entry_parm;
2745 rtx stack_parm = data->stack_parm;
2746
2747 /* If this parm was passed part in regs and part in memory, pretend it
2748 arrived entirely in memory by pushing the register-part onto the stack.
2749 In the special case of a DImode or DFmode that is split, we could put
2750 it together in a pseudoreg directly, but for now that's not worth
2751 bothering with. */
2752 if (data->partial != 0)
2753 {
2754 /* Handle calls that pass values in multiple non-contiguous
2755 locations. The Irix 6 ABI has examples of this. */
2756 if (GET_CODE (entry_parm) == PARALLEL)
2757 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2758 data->passed_type,
2759 int_size_in_bytes (data->passed_type));
2760 else
2761 {
2762 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2763 move_block_from_reg (REGNO (entry_parm),
2764 validize_mem (copy_rtx (stack_parm)),
2765 data->partial / UNITS_PER_WORD);
2766 }
2767
2768 entry_parm = stack_parm;
2769 }
2770
2771 /* If we didn't decide this parm came in a register, by default it came
2772 on the stack. */
2773 else if (entry_parm == NULL)
2774 entry_parm = stack_parm;
2775
2776 /* When an argument is passed in multiple locations, we can't make use
2777 of this information, but we can save some copying if the whole argument
2778 is passed in a single register. */
2779 else if (GET_CODE (entry_parm) == PARALLEL
2780 && data->nominal_mode != BLKmode
2781 && data->passed_mode != BLKmode)
2782 {
2783 size_t i, len = XVECLEN (entry_parm, 0);
2784
2785 for (i = 0; i < len; i++)
2786 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2787 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2788 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2789 == data->passed_mode)
2790 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2791 {
2792 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2793 break;
2794 }
2795 }
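
  /* For example, an ENTRY_PARM such as
     (parallel [(expr_list (reg:DI 5) (const_int 0))])
     says the whole argument lives in register 5, so the REG itself
     can be used directly.  */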
2796
2797 data->entry_parm = entry_parm;
2798}
2799
2800/* A subroutine of assign_parms. Reconstitute any values which were
2801 passed in multiple registers and would fit in a single register. */
2802
2803static void
2804assign_parm_remove_parallels (struct assign_parm_data_one *data)
2805{
2806 rtx entry_parm = data->entry_parm;
2807
2808 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2809 This can be done with register operations rather than on the
2810 stack, even if we will store the reconstituted parameter on the
2811 stack later. */
2812 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2813 {
2814 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2815 emit_group_store (parmreg, entry_parm, data->passed_type,
2816 GET_MODE_SIZE (GET_MODE (entry_parm)));
2817 entry_parm = parmreg;
2818 }
2819
2820 data->entry_parm = entry_parm;
2821}
2822
/* A subroutine of assign_parms.  Adjust DATA->STACK_PARM such that it's
   always valid and properly aligned.  */
2825
2826static void
2827assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2828{
2829 rtx stack_parm = data->stack_parm;
2830
2831 /* If we can't trust the parm stack slot to be aligned enough for its
2832 ultimate type, don't use that slot after entry. We'll make another
2833 stack slot, if we need one. */
2834 if (stack_parm
2835 && ((STRICT_ALIGNMENT
2836 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2837 || (data->nominal_type
2838 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2839 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2840 stack_parm = NULL;
2841
2842 /* If parm was passed in memory, and we need to convert it on entry,
2843 don't store it back in that same slot. */
2844 else if (data->entry_parm == stack_parm
2845 && data->nominal_mode != BLKmode
2846 && data->nominal_mode != data->passed_mode)
2847 stack_parm = NULL;
2848
2849 /* If stack protection is in effect for this function, don't leave any
2850 pointers in their passed stack slots. */
2851 else if (crtl->stack_protect_guard
2852 && (flag_stack_protect == 2
2853 || data->passed_pointer
2854 || POINTER_TYPE_P (data->nominal_type)))
2855 stack_parm = NULL;
2856
2857 data->stack_parm = stack_parm;
2858}
2859
2860/* A subroutine of assign_parms. Return true if the current parameter
2861 should be stored as a BLKmode in the current frame. */
2862
2863static bool
2864assign_parm_setup_block_p (struct assign_parm_data_one *data)
2865{
2866 if (data->nominal_mode == BLKmode)
2867 return true;
2868 if (GET_MODE (data->entry_parm) == BLKmode)
2869 return true;
2870
2871#ifdef BLOCK_REG_PADDING
2872 /* Only assign_parm_setup_block knows how to deal with register arguments
2873 that are padded at the least significant end. */
2874 if (REG_P (data->entry_parm)
2875 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2876 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2877 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2878 return true;
2879#endif
2880
2881 return false;
2882}
2883
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_PARM.  */
2886
2887static void
2888assign_parm_setup_block (struct assign_parm_data_all *all,
2889 tree parm, struct assign_parm_data_one *data)
2890{
2891 rtx entry_parm = data->entry_parm;
2892 rtx stack_parm = data->stack_parm;
2893 rtx target_reg = NULL_RTX;
2894 bool in_conversion_seq = false;
2895 HOST_WIDE_INT size;
2896 HOST_WIDE_INT size_stored;
2897
2898 if (GET_CODE (entry_parm) == PARALLEL)
2899 entry_parm = emit_group_move_into_temps (entry_parm);
2900
2901 /* If we want the parameter in a pseudo, don't use a stack slot. */
2902 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2903 {
2904 tree def = ssa_default_def (cfun, parm);
2905 gcc_assert (def);
2906 machine_mode mode = promote_ssa_mode (def, NULL);
2907 rtx reg = gen_reg_rtx (mode);
2908 if (GET_CODE (reg) != CONCAT)
2909 stack_parm = reg;
2910 else
2911 {
2912 target_reg = reg;
2913 /* Avoid allocating a stack slot, if there isn't one
2914 preallocated by the ABI. It might seem like we should
2915 always prefer a pseudo, but converting between
2916 floating-point and integer modes goes through the stack
2917 on various machines, so it's better to use the reserved
2918 stack slot than to risk wasting it and allocating more
2919 for the conversion. */
2920 if (stack_parm == NULL_RTX)
2921 {
2922 int save = generating_concat_p;
2923 generating_concat_p = 0;
2924 stack_parm = gen_reg_rtx (mode);
2925 generating_concat_p = save;
2926 }
2927 }
2928 data->stack_parm = NULL;
2929 }
2930
2931 size = int_size_in_bytes (data->passed_type);
2932 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
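  /* E.g. a 10-byte aggregate on a target with 4-byte words occupies
     CEIL_ROUND (10, 4) == 12 bytes, an integral number of words.  */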
2933 if (stack_parm == 0)
2934 {
2935 SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
2936 stack_parm = assign_stack_local (BLKmode, size_stored,
2937 DECL_ALIGN (parm));
2938 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2939 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2940 set_mem_attributes (stack_parm, parm, 1);
2941 }
2942
2943 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2944 calls that pass values in multiple non-contiguous locations. */
2945 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2946 {
2947 rtx mem;
2948
2949 /* Note that we will be storing an integral number of words.
2950 So we have to be careful to ensure that we allocate an
2951 integral number of words. We do this above when we call
2952 assign_stack_local if space was not allocated in the argument
2953 list. If it was, this will not work if PARM_BOUNDARY is not
2954 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2955 if it becomes a problem. Exception is when BLKmode arrives
2956 with arguments not conforming to word_mode. */
2957
2958 if (data->stack_parm == 0)
2959 ;
2960 else if (GET_CODE (entry_parm) == PARALLEL)
2961 ;
2962 else
2963 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2964
2965 mem = validize_mem (copy_rtx (stack_parm));
2966
2967 /* Handle values in multiple non-contiguous locations. */
2968 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2969 emit_group_store (mem, entry_parm, data->passed_type, size);
2970 else if (GET_CODE (entry_parm) == PARALLEL)
2971 {
2972 push_to_sequence2 (all->first_conversion_insn,
2973 all->last_conversion_insn);
2974 emit_group_store (mem, entry_parm, data->passed_type, size);
2975 all->first_conversion_insn = get_insns ();
2976 all->last_conversion_insn = get_last_insn ();
2977 end_sequence ();
2978 in_conversion_seq = true;
2979 }
2980
2981 else if (size == 0)
2982 ;
2983
2984 /* If SIZE is that of a mode no bigger than a word, just use
2985 that mode's store operation. */
2986 else if (size <= UNITS_PER_WORD)
2987 {
2988 unsigned int bits = size * BITS_PER_UNIT;
2989 machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
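
	  /* E.g. a 4-byte argument yields SImode on typical targets;
	     if no integer mode has exactly SIZE bytes, MODE is BLKmode
	     and we fall through to the paths below.  */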
2990
2991 if (mode != BLKmode
2992#ifdef BLOCK_REG_PADDING
2993 && (size == UNITS_PER_WORD
2994 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2995 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2996#endif
2997 )
2998 {
2999 rtx reg;
3000
3001 /* We are really truncating a word_mode value containing
3002 SIZE bytes into a value of mode MODE. If such an
3003 operation requires no actual instructions, we can refer
3004 to the value directly in mode MODE, otherwise we must
3005 start with the register in word_mode and explicitly
3006 convert it. */
3007 if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
3008 BITS_PER_WORD))
3009 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3010 else
3011 {
3012 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3013 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3014 }
3015 emit_move_insn (change_address (mem, mode, 0), reg);
3016 }
3017
3018#ifdef BLOCK_REG_PADDING
3019 /* Storing the register in memory as a full word, as
3020 move_block_from_reg below would do, and then using the
3021 MEM in a smaller mode, has the effect of shifting right
3022 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3023 shifting must be explicit. */
3024 else if (!MEM_P (mem))
3025 {
3026 rtx x;
3027
3028 /* If the assert below fails, we should have taken the
3029 mode != BLKmode path above, unless we have downward
3030 padding of smaller-than-word arguments on a machine
3031 with little-endian bytes, which would likely require
3032 additional changes to work correctly. */
3033 gcc_checking_assert (BYTES_BIG_ENDIAN
3034 && (BLOCK_REG_PADDING (mode,
3035 data->passed_type, 1)
3036 == PAD_UPWARD));
3037
3038 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
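
	      /* E.g. a 3-byte value in a 4-byte word is shifted right
		 by 8 bits so that it occupies the low-order end read
		 by the narrower SUBREG below.  */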
3039
3040 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3041 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3042 NULL_RTX, 1);
3043 x = force_reg (word_mode, x);
3044 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3045
3046 emit_move_insn (mem, x);
3047 }
3048#endif
3049
3050 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3051 machine must be aligned to the left before storing
3052 to memory. Note that the previous test doesn't
3053 handle all cases (e.g. SIZE == 3). */
3054 else if (size != UNITS_PER_WORD
3055#ifdef BLOCK_REG_PADDING
3056 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3057 == PAD_DOWNWARD)
3058#else
3059 && BYTES_BIG_ENDIAN
3060#endif
3061 )
3062 {
3063 rtx tem, x;
3064 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3065 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
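
	      /* E.g. a 3-byte argument is shifted left by 8 bits so
		 that it sits left-aligned in the word when stored to
		 memory.  */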
3066
3067 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3068 tem = change_address (mem, word_mode, 0);
3069 emit_move_insn (tem, x);
3070 }
3071 else
3072 move_block_from_reg (REGNO (entry_parm), mem,
3073 size_stored / UNITS_PER_WORD);
3074 }
3075 else if (!MEM_P (mem))
3076 {
3077 gcc_checking_assert (size > UNITS_PER_WORD);
3078#ifdef BLOCK_REG_PADDING
3079 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3080 data->passed_type, 0)
3081 == PAD_UPWARD);
3082#endif
3083 emit_move_insn (mem, entry_parm);
3084 }
3085 else
3086 move_block_from_reg (REGNO (entry_parm), mem,
3087 size_stored / UNITS_PER_WORD);
3088 }
3089 else if (data->stack_parm == 0)
3090 {
3091 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3092 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3093 BLOCK_OP_NORMAL);
3094 all->first_conversion_insn = get_insns ();
3095 all->last_conversion_insn = get_last_insn ();
3096 end_sequence ();
3097 in_conversion_seq = true;
3098 }
3099
3100 if (target_reg)
3101 {
3102 if (!in_conversion_seq)
3103 emit_move_insn (target_reg, stack_parm);
3104 else
3105 {
3106 push_to_sequence2 (all->first_conversion_insn,
3107 all->last_conversion_insn);
3108 emit_move_insn (target_reg, stack_parm);
3109 all->first_conversion_insn = get_insns ();
3110 all->last_conversion_insn = get_last_insn ();
3111 end_sequence ();
3112 }
3113 stack_parm = target_reg;
3114 }
3115
3116 data->stack_parm = stack_parm;
3117 set_parm_rtl (parm, stack_parm);
3118}
3119
3120/* A subroutine of assign_parms. Allocate a pseudo to hold the current
3121 parameter. Get it there. Perform all ABI specified conversions. */
3122
3123static void
3124assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3125 struct assign_parm_data_one *data)
3126{
3127 rtx parmreg, validated_mem;
3128 rtx equiv_stack_parm;
3129 machine_mode promoted_nominal_mode;
3130 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3131 bool did_conversion = false;
3132 bool need_conversion, moved;
3133 rtx rtl;
3134
3135 /* Store the parm in a pseudoregister during the function, but we may
3136 need to do it in a wider mode. Using 2 here makes the result
3137 consistent with promote_decl_mode and thus expand_expr_real_1. */
3138 promoted_nominal_mode
3139 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3140 TREE_TYPE (current_function_decl), 2);
3141
3142 parmreg = gen_reg_rtx (promoted_nominal_mode);
3143 if (!DECL_ARTIFICIAL (parm))
3144 mark_user_reg (parmreg);
3145
3146 /* If this was an item that we received a pointer to,
3147 set rtl appropriately. */
3148 if (data->passed_pointer)
3149 {
3150 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3151 set_mem_attributes (rtl, parm, 1);
3152 }
3153 else
3154 rtl = parmreg;
3155
3156 assign_parm_remove_parallels (data);
3157
3158 /* Copy the value into the register, thus bridging between
3159 assign_parm_find_data_types and expand_expr_real_1. */
3160
3161 equiv_stack_parm = data->stack_parm;
3162 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3163
3164 need_conversion = (data->nominal_mode != data->passed_mode
3165 || promoted_nominal_mode != data->promoted_mode);
3166 moved = false;
3167
3168 if (need_conversion
3169 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3170 && data->nominal_mode == data->passed_mode
3171 && data->nominal_mode == GET_MODE (data->entry_parm))
3172 {
3173 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3174 mode, by the caller. We now have to convert it to
3175 NOMINAL_MODE, if different. However, PARMREG may be in
3176 a different mode than NOMINAL_MODE if it is being stored
3177 promoted.
3178
3179 If ENTRY_PARM is a hard register, it might be in a register
3180 not valid for operating in its mode (e.g., an odd-numbered
3181 register for a DFmode). In that case, moves are the only
3182 thing valid, so we can't do a convert from there. This
	 occurs when the calling sequence allows such misaligned
3184 usages.
3185
3186 In addition, the conversion may involve a call, which could
3187 clobber parameters which haven't been copied to pseudo
3188 registers yet.
3189
3190 First, we try to emit an insn which performs the necessary
3191 conversion. We verify that this insn does not clobber any
3192 hard registers. */
3193
3194 enum insn_code icode;
3195 rtx op0, op1;
3196
3197 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3198 unsignedp);
3199
3200 op0 = parmreg;
3201 op1 = validated_mem;
3202 if (icode != CODE_FOR_nothing
3203 && insn_operand_matches (icode, 0, op0)
3204 && insn_operand_matches (icode, 1, op1))
3205 {
3206 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3207 rtx_insn *insn, *insns;
3208 rtx t = op1;
3209 HARD_REG_SET hardregs;
3210
3211 start_sequence ();
3212 /* If op1 is a hard register that is likely spilled, first
3213 force it into a pseudo, otherwise combiner might extend
3214 its lifetime too much. */
3215 if (GET_CODE (t) == SUBREG)
3216 t = SUBREG_REG (t);
3217 if (REG_P (t)
3218 && HARD_REGISTER_P (t)
3219 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3220 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3221 {
3222 t = gen_reg_rtx (GET_MODE (op1));
3223 emit_move_insn (t, op1);
3224 }
3225 else
3226 t = op1;
3227 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3228 data->passed_mode, unsignedp);
3229 emit_insn (pat);
3230 insns = get_insns ();
3231
3232 moved = true;
3233 CLEAR_HARD_REG_SET (hardregs);
3234 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3235 {
3236 if (INSN_P (insn))
3237 note_stores (PATTERN (insn), record_hard_reg_sets,
3238 &hardregs);
3239 if (!hard_reg_set_empty_p (hardregs))
3240 moved = false;
3241 }
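
	  /* If the sequence set any hard register (e.g. because the
	     extension expanded to a library call), it could clobber
	     parameters still live in their incoming registers, so
	     discard it and use the two-step conversion fallback
	     below.  */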
3242
3243 end_sequence ();
3244
3245 if (moved)
3246 {
3247 emit_insn (insns);
3248 if (equiv_stack_parm != NULL_RTX)
3249 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3250 equiv_stack_parm);
3251 }
3252 }
3253 }
3254
3255 if (moved)
3256 /* Nothing to do. */
3257 ;
3258 else if (need_conversion)
3259 {
3260 /* We did not have an insn to convert directly, or the sequence
3261 generated appeared unsafe. We must first copy the parm to a
3262 pseudo reg, and save the conversion until after all
3263 parameters have been moved. */
3264
3265 int save_tree_used;
3266 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3267
3268 emit_move_insn (tempreg, validated_mem);
3269
3270 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3271 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3272
3273 if (partial_subreg_p (tempreg)
3274 && GET_MODE (tempreg) == data->nominal_mode
3275 && REG_P (SUBREG_REG (tempreg))
3276 && data->nominal_mode == data->passed_mode
3277 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3278 {
3279 /* The argument is already sign/zero extended, so note it
3280 into the subreg. */
3281 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3282 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3283 }
3284
3285 /* TREE_USED gets set erroneously during expand_assignment. */
3286 save_tree_used = TREE_USED (parm);
3287 SET_DECL_RTL (parm, rtl);
3288 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3289 SET_DECL_RTL (parm, NULL_RTX);
3290 TREE_USED (parm) = save_tree_used;
3291 all->first_conversion_insn = get_insns ();
3292 all->last_conversion_insn = get_last_insn ();
3293 end_sequence ();
3294
3295 did_conversion = true;
3296 }
3297 else
3298 emit_move_insn (parmreg, validated_mem);
3299
3300 /* If we were passed a pointer but the actual value can safely live
3301 in a register, retrieve it and use it directly. */
3302 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3303 {
3304 /* We can't use nominal_mode, because it will have been set to
3305 Pmode above. We must use the actual mode of the parm. */
3306 if (use_register_for_decl (parm))
3307 {
3308 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3309 mark_user_reg (parmreg);
3310 }
3311 else
3312 {
3313 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3314 TYPE_MODE (TREE_TYPE (parm)),
3315 TYPE_ALIGN (TREE_TYPE (parm)));
3316 parmreg
3317 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3318 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3319 align);
3320 set_mem_attributes (parmreg, parm, 1);
3321 }
3322
3323 /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3324 the debug info in case it is not legitimate. */
3325 if (GET_MODE (parmreg) != GET_MODE (rtl))
3326 {
3327 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3328 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3329
3330 push_to_sequence2 (all->first_conversion_insn,
3331 all->last_conversion_insn);
3332 emit_move_insn (tempreg, rtl);
3333 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3334 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3335 tempreg);
3336 all->first_conversion_insn = get_insns ();
3337 all->last_conversion_insn = get_last_insn ();
3338 end_sequence ();
3339
3340 did_conversion = true;
3341 }
3342 else
3343 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3344
3345 rtl = parmreg;
3346
3347 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3348 now the parm. */
3349 data->stack_parm = NULL;
3350 }
3351
3352 set_parm_rtl (parm, rtl);
3353
3354 /* Mark the register as eliminable if we did no conversion and it was
3355 copied from memory at a fixed offset, and the arg pointer was not
3356 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3357 offset formed an invalid address, such memory-equivalences as we
3358 make here would screw up life analysis for it. */
3359 if (data->nominal_mode == data->passed_mode
3360 && !did_conversion
3361 && data->stack_parm != 0
3362 && MEM_P (data->stack_parm)
3363 && data->locate.offset.var == 0
3364 && reg_mentioned_p (virtual_incoming_args_rtx,
3365 XEXP (data->stack_parm, 0)))
3366 {
3367 rtx_insn *linsn = get_last_insn ();
3368 rtx_insn *sinsn;
3369 rtx set;
3370
3371 /* Mark complex types separately. */
3372 if (GET_CODE (parmreg) == CONCAT)
3373 {
3374 scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3375 int regnor = REGNO (XEXP (parmreg, 0));
3376 int regnoi = REGNO (XEXP (parmreg, 1));
3377 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3378 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3379 GET_MODE_SIZE (submode));
3380
3381 /* Scan backwards for the set of the real and
3382 imaginary parts. */
3383 for (sinsn = linsn; sinsn != 0;
3384 sinsn = prev_nonnote_insn (sinsn))
3385 {
3386 set = single_set (sinsn);
3387 if (set == 0)
3388 continue;
3389
3390 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3391 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3392 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3393 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3394 }
3395 }
3396 else
3397 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3398 }
3399
3400 /* For pointer data type, suggest pointer register. */
3401 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3402 mark_reg_pointer (parmreg,
3403 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3404}
3405
3406/* A subroutine of assign_parms. Allocate stack space to hold the current
3407 parameter. Get it there. Perform all ABI specified conversions. */
3408
3409static void
3410assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3411 struct assign_parm_data_one *data)
3412{
3413 /* Value must be stored in the stack slot STACK_PARM during function
3414 execution. */
3415 bool to_conversion = false;
3416
3417 assign_parm_remove_parallels (data);
3418
3419 if (data->promoted_mode != data->nominal_mode)
3420 {
3421 /* Conversion is required. */
3422 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3423
3424 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3425
3426 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3427 to_conversion = true;
3428
3429 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3430 TYPE_UNSIGNED (TREE_TYPE (parm)));
3431
3432 if (data->stack_parm)
3433 {
3434 int offset = subreg_lowpart_offset (data->nominal_mode,
3435 GET_MODE (data->stack_parm));
3436 /* ??? This may need a big-endian conversion on sparc64. */
3437 data->stack_parm
3438 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3439 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3440 set_mem_offset (data->stack_parm,
3441 MEM_OFFSET (data->stack_parm) + offset);
3442 }
3443 }
3444
3445 if (data->entry_parm != data->stack_parm)
3446 {
3447 rtx src, dest;
3448
3449 if (data->stack_parm == 0)
3450 {
3451 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3452 GET_MODE (data->entry_parm),
3453 TYPE_ALIGN (data->passed_type));
3454 data->stack_parm
3455 = assign_stack_local (GET_MODE (data->entry_parm),
3456 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3457 align);
3458 set_mem_attributes (data->stack_parm, parm, 1);
3459 }
3460
3461 dest = validize_mem (copy_rtx (data->stack_parm));
3462 src = validize_mem (copy_rtx (data->entry_parm));
3463
3464 if (MEM_P (src))
3465 {
3466 /* Use a block move to handle potentially misaligned entry_parm. */
3467 if (!to_conversion)
3468 push_to_sequence2 (all->first_conversion_insn,
3469 all->last_conversion_insn);
3470 to_conversion = true;
3471
3472 emit_block_move (dest, src,
3473 GEN_INT (int_size_in_bytes (data->passed_type)),
3474 BLOCK_OP_NORMAL);
3475 }
3476 else
3477 {
3478 if (!REG_P (src))
3479 src = force_reg (GET_MODE (src), src);
3480 emit_move_insn (dest, src);
3481 }
3482 }
3483
3484 if (to_conversion)
3485 {
3486 all->first_conversion_insn = get_insns ();
3487 all->last_conversion_insn = get_last_insn ();
3488 end_sequence ();
3489 }
3490
3491 set_parm_rtl (parm, data->stack_parm);
3492}
3493
3494/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3495 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3496
static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
                              vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
        {
          rtx tmp, real, imag;
          scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));

          real = DECL_RTL (fnargs[i]);
          imag = DECL_RTL (fnargs[i + 1]);
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }

          if (TREE_ADDRESSABLE (parm))
            {
              rtx rmem, imem;
              HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
              int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
                                                DECL_MODE (parm),
                                                TYPE_ALIGN (TREE_TYPE (parm)));

              /* split_complex_arg put the real and imag parts in
                 pseudos.  Move them to memory.  */
              tmp = assign_stack_local (DECL_MODE (parm), size, align);
              set_mem_attributes (tmp, parm, 1);
              rmem = adjust_address_nv (tmp, inner, 0);
              imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
              push_to_sequence2 (all->first_conversion_insn,
                                 all->last_conversion_insn);
              emit_move_insn (rmem, real);
              emit_move_insn (imem, imag);
              all->first_conversion_insn = get_insns ();
              all->last_conversion_insn = get_last_insn ();
              end_sequence ();
            }
          else
            tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          set_parm_rtl (parm, tmp);

          real = DECL_INCOMING_RTL (fnargs[i]);
          imag = DECL_INCOMING_RTL (fnargs[i + 1]);
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }
          tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          set_decl_incoming_rtl (parm, tmp, false);
          i++;
        }
    }
}

/* Load the bounds of PARM from the bounds table.  */
static void
assign_parm_load_bounds (struct assign_parm_data_one *data,
                         tree parm,
                         rtx entry,
                         unsigned bound_no)
{
  bitmap_iterator bi;
  unsigned i, offs = 0;
  int bnd_no = -1;
  rtx slot = NULL, ptr = NULL;

  if (parm)
    {
      bitmap slots;
      bitmap_obstack_initialize (NULL);
      slots = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (TREE_TYPE (parm), slots);
      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
        {
          if (bound_no)
            bound_no--;
          else
            {
              bnd_no = i;
              break;
            }
        }
      BITMAP_FREE (slots);
      bitmap_obstack_release (NULL);
    }

  /* We may have bounds not associated with any pointer.  */
  if (bnd_no != -1)
    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;

  /* Find the associated pointer.  */
  if (bnd_no == -1)
    {
      /* If the bounds are not associated with any pointer,
         then they are passed in a register or a special slot.  */
      gcc_assert (data->entry_parm);
      ptr = const0_rtx;
    }
  else if (MEM_P (entry))
    slot = adjust_address (entry, Pmode, offs);
  else if (REG_P (entry))
    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
  else if (GET_CODE (entry) == PARALLEL)
    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
  else
    gcc_unreachable ();
  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
                                                        data->entry_parm);
}

/* Assign RTL expressions to the function's bounds parameters BNDARGS.  */

static void
assign_bounds (vec<bounds_parm_data> &bndargs,
               struct assign_parm_data_all &all,
               bool assign_regs, bool assign_special,
               bool assign_bt)
{
  unsigned i, pass;
  bounds_parm_data *pbdata;

  if (!bndargs.exists ())
    return;

  /* We make several passes to store the input bounds.  First we handle
     bounds passed in registers; then we load bounds passed in special
     slots; finally we load bounds from the Bounds Table.  */
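  /* Purely illustrative scenario (not from the sources): if the first
     two bounds arrive in bound registers, a third in a special slot,
     and the rest must come from the Bounds Table, then pass 0 assigns
     the first two, pass 1 the third, and pass 2 the remaining
     entries.  */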
  for (pass = 0; pass < 3; pass++)
    FOR_EACH_VEC_ELT (bndargs, i, pbdata)
      {
        /* Pass 0 => regs only.  */
        if (pass == 0
            && (!assign_regs
                || (!pbdata->parm_data.entry_parm
                    || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
          continue;
        /* Pass 1 => slots only.  */
        else if (pass == 1
                 && (!assign_special
                     || (!pbdata->parm_data.entry_parm
                         || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
          continue;
        /* Pass 2 => BT only.  */
        else if (pass == 2
                 && (!assign_bt
                     || pbdata->parm_data.entry_parm))
          continue;

        if (!pbdata->parm_data.entry_parm
            || GET_CODE (pbdata->parm_data.entry_parm) != REG)
          assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
                                   pbdata->ptr_entry, pbdata->bound_no);

        set_decl_incoming_rtl (pbdata->bounds_parm,
                               pbdata->parm_data.entry_parm, false);

        if (assign_parm_setup_block_p (&pbdata->parm_data))
          assign_parm_setup_block (&all, pbdata->bounds_parm,
                                   &pbdata->parm_data);
        else if (pbdata->parm_data.passed_pointer
                 || use_register_for_decl (pbdata->bounds_parm))
          assign_parm_setup_reg (&all, pbdata->bounds_parm,
                                 &pbdata->parm_data);
        else
          assign_parm_setup_stack (&all, pbdata->bounds_parm,
                                   &pbdata->parm_data);
      }
}

/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i, bound_no = 0;
  tree last_arg = NULL;
  rtx last_arg_entry = NULL;
  vec<bounds_parm_data> bndargs = vNULL;
  bounds_parm_data bdata;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
        {
          SET_DECL_RTL (parm, const0_rtx);
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
          continue;
        }

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        {
          unsigned int align
            = targetm.calls.function_arg_boundary (data.promoted_mode,
                                                   data.passed_type);
          align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
                                     align);
          if (TYPE_ALIGN (data.nominal_type) > align)
            align = MINIMUM_ALIGNMENT (data.nominal_type,
                                       TYPE_MODE (data.nominal_type),
                                       TYPE_ALIGN (data.nominal_type));
          if (crtl->stack_alignment_estimated < align)
            {
              gcc_assert (!crtl->stack_realign_processed);
              crtl->stack_alignment_estimated = align;
            }
        }

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
        {
          assign_parm_find_stack_rtl (parm, &data);
          assign_parm_adjust_entry_rtl (&data);
        }
      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
        {
          /* Remember where the last non-bounds arg was passed, in case
             we have to load the bounds associated with it from the
             Bounds Table.  */
          last_arg = parm;
          last_arg_entry = data.entry_parm;
          bound_no = 0;
        }
      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
        {
          rtx incoming_rtl
            = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
                           data.entry_parm);
          set_decl_incoming_rtl (parm, incoming_rtl, true);
        }
      else
        set_decl_incoming_rtl (parm, data.entry_parm, false);

      assign_parm_adjust_stack_rtl (&data);

      /* Bounds must be loaded in a particular order for registers to be
         allocated correctly.  Collect info about the input bounds here
         and load them later.  */
      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
        {
          /* Expect bounds in instrumented functions only.  */
          gcc_assert (chkp_function_instrumented_p (fndecl));

          bdata.parm_data = data;
          bdata.bounds_parm = parm;
          bdata.ptr_parm = last_arg;
          bdata.ptr_entry = last_arg_entry;
          bdata.bound_no = bound_no;
          bndargs.safe_push (bdata);
        }
      else
        {
          if (assign_parm_setup_block_p (&data))
            assign_parm_setup_block (&all, parm, &data);
          else if (data.passed_pointer || use_register_for_decl (parm))
            assign_parm_setup_reg (&all, parm, &data);
          else
            assign_parm_setup_stack (&all, parm, &data);
        }

      if (cfun->stdarg && !DECL_CHAIN (parm))
        {
          int pretend_bytes = 0;

          assign_parms_setup_varargs (&all, &data, false);

          if (chkp_function_instrumented_p (fndecl))
            {
              /* We expect this to be the last parm.  Otherwise it would
                 be wrong to assign bounds right now.  */
              gcc_assert (i == (fnargs.length () - 1));
              assign_bounds (bndargs, all, true, false, false);
              targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
                                                          data.promoted_mode,
                                                          data.passed_type,
                                                          &pretend_bytes,
                                                          false);
              assign_bounds (bndargs, all, false, true, true);
              bndargs.release ();
            }
        }

      /* Update info on where the next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
                                          data.passed_type, data.named_arg);

      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
        bound_no++;
    }

  assign_bounds (bndargs, all, true, true, true);
  bndargs.release ();

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
        {
          tree type = TREE_TYPE (DECL_RESULT (fndecl));
          machine_mode mode = TYPE_MODE (type);

          if (mode != BLKmode
              && mode != VOIDmode
              && !AGGREGATE_TYPE_P (type))
            {
              unsigned int align = GET_MODE_ALIGNMENT (mode);
              if (crtl->stack_alignment_estimated < align)
                {
                  gcc_assert (!crtl->stack_realign_processed);
                  crtl->stack_alignment_estimated = align;
                }
            }
        }
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
        {
          SET_DECL_VALUE_EXPR (result, all.function_result_decl);
          x = addr;
        }
      else
        {
          SET_DECL_VALUE_EXPR (result,
                               build1 (INDIRECT_REF, TREE_TYPE (result),
                                       all.function_result_decl));
          addr = convert_memory_address (Pmode, addr);
          x = gen_rtx_MEM (DECL_MODE (result), addr);
          set_mem_attributes (x, result, 1);
        }

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      set_parm_rtl (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = CEIL_ROUND (crtl->args.size,
                                PARM_BOUNDARY / BITS_PER_UNIT);

  if (ARGS_GROW_DOWNWARD)
    {
      crtl->args.arg_offset_rtx
        = (all.stack_args_size.var == 0
           ? GEN_INT (-all.stack_args_size.constant)
           : expand_expr (size_diffop (all.stack_args_size.var,
                                       size_int (-all.stack_args_size.constant)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL));
    }
  else
    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
                                                         TREE_TYPE (fndecl),
                                                         crtl->args.size);

  /* For a stdarg.h function, save info about the regs and stack space
     used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl;

          real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
                                                        fndecl, true);
          if (chkp_function_instrumented_p (fndecl))
            crtl->return_bnd
              = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
                                                          fndecl, true);
          REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
          /* The delay slot scheduler assumes that crtl->return_rtx
             holds the hard register containing the return value, not a
             temporary pseudo.  */
          crtl->return_rtx = real_decl_rtl;
        }
    }
}

/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
        *walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
               && !TYPE_SIZES_GIMPLIFIED (t))
        {
          gimplify_type_sizes (t, (gimple_seq *) data);
          *walk_subtrees = 1;
        }
    }

  return NULL;
}

/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable-sized parameters and generating code
   to implement callee-copied reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

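/* A sketch of the effect (illustrative only; the pseudo-code names are
   ours, not the exact builtins): for a callee-copied variable-sized
   reference parameter, the loop below emits roughly

     addr = alloca-like allocation of DECL_SIZE_UNIT (parm) bytes;
     *addr = *parm;

   and then rewires the parameter's DECL_VALUE_EXPR to the local copy.  */
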
gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
        continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
                                          data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
         SAVE_EXPRs (amongst others) onto a pending sizes list.  This
         turned out to be less than manageable in the gimple world.
         Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
                                    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
        {
          gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
          gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
        }

      if (data.passed_pointer)
        {
          tree type = TREE_TYPE (data.passed_type);
          if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
                                       type, data.named_arg))
            {
              tree local, t;

              /* For constant-sized objects, this is trivial; for
                 variable-sized objects, we have to play games.  */
              if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
                  && !(flag_stack_check == GENERIC_STACK_CHECK
                       && compare_tree_int (DECL_SIZE_UNIT (parm),
                                            STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  local = create_tmp_var (type, get_name (parm));
                  DECL_IGNORED_P (local) = 0;
                  /* If PARM was addressable, move that flag over
                     to the local copy, as its address will be taken,
                     not the PARM's.  Keep the parm's address-taken
                     flag set, as we'll query it during gimplification.  */
                  if (TREE_ADDRESSABLE (parm))
                    TREE_ADDRESSABLE (local) = 1;
                  else if (TREE_CODE (type) == COMPLEX_TYPE
                           || TREE_CODE (type) == VECTOR_TYPE)
                    DECL_GIMPLE_REG_P (local) = 1;
                }
              else
                {
                  tree ptr_type, addr;

                  ptr_type = build_pointer_type (type);
                  addr = create_tmp_reg (ptr_type, get_name (parm));
                  DECL_IGNORED_P (addr) = 0;
                  local = build_fold_indirect_ref (addr);

                  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
                                              DECL_ALIGN (parm),
                                              max_int_size_in_bytes (type));
                  /* The call has been built for a variable-sized object.  */
                  CALL_ALLOCA_FOR_VAR_P (t) = 1;
                  t = fold_convert (ptr_type, t);
                  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
                  gimplify_and_add (t, &stmts);
                }

              gimplify_assign (local, parm, &stmts);

              SET_DECL_VALUE_EXPR (parm, local);
              DECL_HAS_VALUE_EXPR_P (parm) = 1;
            }
        }
    }

  fnargs.release ();

  return stmts;
}

/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specified boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

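/* A worked example (hypothetical target values, for illustration only):
   assume PARM_BOUNDARY == 32, STACK_POINTER_OFFSET == 0, a
   function_arg_boundary of 64 bits, a round boundary equal to
   PARM_BOUNDARY, and a 5-byte BLKmode argument arriving at initial
   offset 4 with arguments growing upward.  The first rounding moves the
   slot offset up to CEIL_ROUND (4, 8) == 8; the second rounds the size
   up to 8 bytes (the next multiple of 4 bytes).  The initial offset
   itself is not changed by the size rounding.  */
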
void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
                     int reg_parm_stack_space, int partial,
                     tree fndecl ATTRIBUTE_UNUSED,
                     struct args_size *initial_offset_ptr,
                     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  pad_direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
        {
          if (initial_offset_ptr->var)
            {
              initial_offset_ptr->var
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
                              ssize_int (reg_parm_stack_space));
              initial_offset_ptr->constant = 0;
            }
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
            initial_offset_ptr->constant = reg_parm_stack_space;
        }
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree = (type
              ? arg_size_in_bytes (type)
              : size_int (GET_MODE_SIZE (passed_mode)));
  where_pad = targetm.calls.function_arg_padding (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
                                                              type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
         realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = boundary;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized
                          && crtl->stack_realign_needed);
            }
        }
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

  if (ARGS_GROW_DOWNWARD)
    {
      locate->slot_offset.constant = -initial_offset_ptr->constant;
      if (initial_offset_ptr->var)
        locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
                                              initial_offset_ptr->var);

      {
        tree s2 = sizetree;
        if (where_pad != PAD_NONE
            && (!tree_fits_uhwi_p (sizetree)
                || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
          s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
        SUB_PARM_SIZE (locate->slot_offset, s2);
      }

      locate->slot_offset.constant += part_size_in_regs;

      if (!in_regs || reg_parm_stack_space > 0)
        pad_to_arg_alignment (&locate->slot_offset, boundary,
                              &locate->alignment_pad);

      locate->size.constant = (-initial_offset_ptr->constant
                               - locate->slot_offset.constant);
      if (initial_offset_ptr->var)
        locate->size.var = size_binop (MINUS_EXPR,
                                       size_binop (MINUS_EXPR,
                                                   ssize_int (0),
                                                   initial_offset_ptr->var),
                                       locate->slot_offset.var);

      /* Pad_below needs the pre-rounded size to know how much to pad
         below.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
        pad_below (&locate->offset, passed_mode, sizetree);

    }
  else
    {
      if (!in_regs || reg_parm_stack_space > 0)
        pad_to_arg_alignment (initial_offset_ptr, boundary,
                              &locate->alignment_pad);
      locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
      if (passed_mode != BLKmode)
        sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

      /* Pad_below needs the pre-rounded size to know how much to pad below
         so this must be done before rounding up.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
        pad_below (&locate->offset, passed_mode, sizetree);

      if (where_pad != PAD_NONE
          && (!tree_fits_uhwi_p (sizetree)
              || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
        sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

      ADD_PARM_SIZE (locate->size, sizetree);

      locate->size.constant -= part_size_in_regs;
    }

  locate->offset.constant
    += targetm.calls.function_arg_offset (passed_mode, type);
}

/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

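/* A worked example (hypothetical values, for illustration only): with
   BOUNDARY == 64 (so 8 bytes), STACK_POINTER_OFFSET == 4 and a constant
   offset of 10, arguments growing upward give

     -4 + CEIL_ROUND (10 + 4, 8) == -4 + 16 == 12

   and, if BOUNDARY exceeds PARM_BOUNDARY, ALIGNMENT_PAD records the
   resulting 2 bytes of padding.  */
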
static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
                      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
        {
          tree sp_offset_tree = ssize_int (sp_offset);
          tree offset = size_binop (PLUS_EXPR,
                                    ARGS_SIZE_TREE (*offset_ptr),
                                    sp_offset_tree);
          tree rounded;
          if (ARGS_GROW_DOWNWARD)
            rounded = round_down (offset, boundary / BITS_PER_UNIT);
          else
            rounded = round_up (offset, boundary / BITS_PER_UNIT);

          offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
          /* ARGS_SIZE_TREE includes constant term.  */
          offset_ptr->constant = 0;
          if (boundary > PARM_BOUNDARY)
            alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
                                             save_var);
        }
      else
        {
          offset_ptr->constant = -sp_offset +
            (ARGS_GROW_DOWNWARD
             ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
             : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));

          if (boundary > PARM_BOUNDARY)
            alignment_pad->constant = offset_ptr->constant - save_constant;
        }
    }
}

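/* Adjust the offset in *OFFSET_PTR to skip the padding placed below an
   argument of mode PASSED_MODE (or, for BLKmode, of size SIZETREE): add
   the number of bytes by which the argument's size falls short of the
   next multiple of PARM_BOUNDARY.  */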
static void
pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
{
  unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
  if (passed_mode != BLKmode)
    offset_ptr->constant += -GET_MODE_SIZE (passed_mode) & (align - 1);
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
        {
          /* Round the size up to a multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (