/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "intl.h"
#include "hash-map.h"
#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "attr-fnspec.h"
#include "value-query.h"
#include "tree-pretty-print.h"
#include "tree-eh.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* True if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is false, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  bool pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static bool store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static bool finalize_must_preallocate (bool, int, struct arg_data *,
                                       struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, bool *);
static int special_function_p (const_tree, int);
static bool check_sibcall_argument_overlap_1 (rtx);
static bool check_sibcall_argument_overlap (rtx_insn *, struct arg_data *,
                                            bool);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
                           unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

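  /* Everything at or above the watermark must conservatively be treated
     as used; this also catches regions whose upper bound is unknown,
     since it was forced to HOST_WIDE_INT_M1U above.  */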
  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper)
      && const_upper <= highest_outgoing_arg_in_use)
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
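    /* The region is variable-sized or extends beyond the usage map, so
       record it by lowering the watermark rather than marking individual
       bytes.  */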
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
         runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
        {
          const int bit_val = targetm.calls.custom_function_descriptors;
          rtx call_lab = gen_label_rtx ();

          gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
          fndecl_or_type
            = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
                          fndecl_or_type);
          DECL_STATIC_CHAIN (fndecl_or_type) = 1;
          rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

          if (GET_MODE (funexp) != Pmode)
            funexp = convert_memory_address (Pmode, funexp);

          /* Avoid long live ranges around function calls.  */
          funexp = copy_to_mode_reg (Pmode, funexp);

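          /* Clobber the static chain register first so that it is not
             assumed to hold a live value across the identification code
             emitted below.  */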
          if (REG_P (chain))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

          /* Emit the runtime identification pattern.  */
          rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
          emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
                                   call_lab);

          /* Statically predict the branch as very likely taken.  */
          rtx_insn *insn = get_last_insn ();
          if (JUMP_P (insn))
            predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

          /* Load the descriptor.  */
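          /* The first pointer-sized word of the descriptor supplies the
             static chain value and the second word the actual entry
             point.  */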
          rtx mem = gen_rtx_MEM (ptr_mode,
                                 plus_constant (Pmode, funexp, - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (chain, mem);

          mem = gen_rtx_MEM (ptr_mode,
                             plus_constant (Pmode, funexp,
                                            POINTER_SIZE / BITS_PER_UNIT
                                            - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (funexp, mem);

          emit_label (call_lab);

          if (REG_P (chain))
            {
              use_reg (call_fusage, chain);
              STATIC_CHAIN_REG_P (chain) = 1;
            }

          /* Make sure we're not going to be overwritten below.  */
          gcc_assert (!static_chain_value);
        }

      /* If we are using registers for parameters, force the
         function address into a register now.  */
      funexp = ((reg_parm_seen
                 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
                ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
                : memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* FUNEXP could be a SYMBOL_REF that represents a function pointer of
         ptr_mode.  In that case, it must be converted into address mode
         (Pmode) to be a valid address for the memory rtx pattern.  See
         PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
        funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
        {
          if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
            funexp = force_reg (Pmode, funexp);
        }
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
          || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        {
          use_reg (call_fusage, chain);
          STATIC_CHAIN_REG_P (chain) = 1;
        }
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far,
                                 function_arg_info::end_marker ());
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             poly_int64 stack_size ATTRIBUTE_UNUSED,
             poly_int64 rounded_stack_size,
             poly_int64 struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  bool already_popped = false;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure the address is valid.  A SYMBOL_REF is already valid, so nothing
     needs doing, and we don't want to load it into a register as an
     optimization because prepare_call_address already did that when
     appropriate.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
        pat = targetm.gen_sibcall_value (valreg, funmem,
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
                                   next_arg_reg,
                                   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
           || !(valreg
                ? targetm.have_call_value ()
                : targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = targetm.gen_call_value_pop (valreg, funmem,
                                          rounded_stack_size_rtx,
                                          next_arg_reg, n_pop);
      else
        pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
                                    next_arg_reg, n_pop);

      already_popped = true;
    }
  else
    {
      if (valreg)
        pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
                                      next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
                                gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
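      /* The callee itself pops N_POPPED bytes, so the adjustment still
         owed by the caller shrinks by that amount.  */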
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realign must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

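  /* The length check cheaply rules out most names; 11 characters covers
     the longest special name matched below (e.g. "__sigsetjmp").  */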
  if (maybe_special_function_p (fndecl)
      && IDENTIFIER_LENGTH (name_decl) <= 11)
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
          && name[0] == 'a'
          && ! strcmp (name, "alloca"))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
        {
          if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
          || ! strcmp (tname, "sigsetjmp")
          || ! strcmp (name, "savectx")
          || ! strcmp (name, "vfork")
          || ! strcmp (name, "getcontext"))
        flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}

/* Return the fnspec for FNDECL.  */

static attr_fnspec
decl_fnspec (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (type)
    {
      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
      if (attr)
        {
          return TREE_VALUE (TREE_VALUE (attr));
        }
    }
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return builtin_fnspec (fndecl);
  return "";
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  attr_fnspec fnspec = decl_fnspec (fndecl);

  unsigned int arg;
  if (fnspec.returns_arg (&arg))
    return ERF_RETURNS_ARG | arg;

  if (fnspec.returns_noalias_p ())
    return ERF_NOALIAS;
  return 0;
}

/* Return true when FNDECL represents a call to setjmp.  */

bool
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return true;
  if (special_function_p (fndecl, 0) & ECF_RETURNS_TWICE)
    return true;

  return false;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return gimple_call_num_args (stmt) > 0;
      default:
        break;
      }

  return false;
}

/* Return true when EXP contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return true;
      default:
        break;
      }

  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
        flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      if (lookup_attribute ("expected_throw", DECL_ATTRIBUTES (exp)))
        flags |= ECF_XTHROW;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
        flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
        flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if ARG should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          arg.type = TREE_TYPE (first_field (type));
          arg.mode = TYPE_MODE (arg.type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}

/* Return true if TYPE should be passed by reference when passed to
   the "..." arguments of a function.  */

bool
pass_va_arg_by_reference (tree type)
{
  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}

/* Decide whether ARG, which occurs in the state described by CA,
   should be passed by reference.  Return true if so and update
   ARG accordingly.  */

bool
apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
{
  if (pass_by_reference (ca, arg))
    {
      arg.type = build_pointer_type (arg.type);
      arg.mode = TYPE_MODE (arg.type);
      arg.pass_by_reference = true;
      return true;
    }
  return false;
}

/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        machine_mode old_mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));

        /* Some ABIs require scalar floating point modes to be returned
           in a wider scalar integer mode.  We need to explicitly
           reinterpret to an integer mode of the correct precision
           before extending to the desired result.  */
        if (SCALAR_INT_MODE_P (args[i].mode)
            && SCALAR_FLOAT_MODE_P (old_mode)
            && known_gt (GET_MODE_SIZE (args[i].mode),
                         GET_MODE_SIZE (old_mode)))
          args[i].value = convert_float_to_wider_int (args[i].mode, old_mode,
                                                      args[i].value);
        else if (args[i].mode != old_mode)
          args[i].value = convert_modes (args[i].mode, old_mode,
                                         args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && (!targetm.legitimate_constant_p (args[i].mode, args[i].value)
                || targetm.precompute_tls_p (args[i].mode, args[i].value)))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && (set_src_cost (args[i].value, args[i].mode,
                                   optimize_insn_for_speed_p ())
                     > COSTS_N_INSNS (1))
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
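    /* A byte counts as used if it is explicitly marked in the map or if
       it lies at or above the watermark (i.e. within a variable-sized
       use).  */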
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

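        /* Trim unused bytes from the top of the area before saving.  */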
        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;

        /* If we don't have the required alignment, we must do this
           in BLKmode.  */
        scalar_int_mode imode;
        if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
            && (low & (MIN (GET_MODE_SIZE (imode),
                            BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
          save_mode = imode;
        else
          save_mode = BLKmode;

        if (ARGS_GROW_DOWNWARD)
          delta = -high;
        else
          delta = low;

        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == PAD_DOWNWARD)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, false, NULL);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word, false, false);
          }
      }
}

/* Issue an error if CALL_EXPR was flagged as requiring
   tail-call optimization.  */

void
maybe_complain_about_tail_call (tree call_expr, const char *reason)
{
  gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
  if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
    return;

  error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ and FLAGS are pointers to integer flags which
   may be modified by this routine.

   MUST_PREALLOCATE is a pointer to bool which may be
   modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl, tree fntype,
                                 cumulative_args_t args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level,
                                 poly_int64 *old_pending_adj,
                                 bool *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the back.  */

  i = num_actuals - 1;
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j--;
      }
    argpos = 0;
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);

        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j--;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j--;
        argpos++;
      }
  }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i--, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
         we would pass the first field of the union or record.  We have
         already verified that the modes are the same.  */
      if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
        type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is true if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.cc.  */

      /* See if this argument should be passed by invisible reference.  */
      function_arg_info arg (type, argpos < n_named_args);
      if (pass_by_reference (args_so_far_pnt, arg))
        {
          const bool callee_copies
            = reference_callee_copied (args_so_far_pnt, arg);
          tree base;

          /* If we're compiling a thunk, pass directly the address of an object
             already in memory, instead of making a copy.  Likewise if we want
             to make the copy in the callee instead of the caller.  */
          if ((call_from_thunk_p || callee_copies)
              && TREE_CODE (args[i].tree_value) != WITH_SIZE_EXPR
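              /* The comma expression below merely assigns BASE for use by
                 the following conditions; it always evaluates to true.  */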
              && ((base = get_base_address (args[i].tree_value)), true)
              && TREE_CODE (base) != SSA_NAME
              && (!DECL_P (base) || MEM_P (DECL_RTL (base))))
            {
              /* We may have turned the parameter value into an SSA name.
                 Go back to the original parameter so we can take the
                 address.  */
              if (TREE_CODE (args[i].tree_value) == SSA_NAME)
                {
                  gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
                  args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
                  gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
                }
              /* Argument setup code may have copied the value to a register.
                 We revert that optimization now because the tail call code
                 must use the original location.  */
              if (TREE_CODE (args[i].tree_value) == PARM_DECL
                  && !MEM_P (DECL_RTL (args[i].tree_value))
                  && DECL_INCOMING_RTL (args[i].tree_value)
                  && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
                set_decl_rtl (args[i].tree_value,
                              DECL_INCOMING_RTL (args[i].tree_value));

              mark_addressable (args[i].tree_value);

              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                {
                  *may_tailcall = false;
                  maybe_complain_about_tail_call (exp,
                                                  "a callee-copied argument is"
                                                  " stored in the current"
                                                  " function's frame");
                }

              args[i].tree_value = build_fold_addr_expr_loc (loc,
                                                             args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
                  || (flag_stack_check == GENERIC_STACK_CHECK
                      && compare_tree_int (TYPE_SIZE_UNIT (type),
                                           STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  /* We can pass TRUE as the 4th argument because we just
                     saved the stack pointer and will restore it right after
                     the call.  */
                  copy = allocate_dynamic_stack_space (size_rtx,
                                                       TYPE_ALIGN (type),
                                                       TYPE_ALIGN (type),
                                                       max_int_size_in_bytes
                                                       (type),
                                                       true);
                  copy = gen_rtx_MEM (BLKmode, copy);
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false, false);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr_loc (loc, make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
              maybe_complain_about_tail_call (exp,
                                              "argument must be passed"
                                              " by copying");
            }
          arg.pass_by_reference = true;
        }

      unsignedp = TYPE_UNSIGNED (type);
      arg.type = type;
      arg.mode
        = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                 fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = arg.mode;

      targetm.calls.warn_parameter_passing_abi (args_so_far, type);

      args[i].reg = targetm.calls.function_arg (args_so_far, arg);

      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
        args[i].tail_call_reg
          = targetm.calls.function_incoming_arg (args_so_far, arg);
      else
        args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
        args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = true;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = true;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (arg.mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             reg_parm_stack_space,
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (arg.mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      /* ??? Traditionally we've passed TYPE_MODE here, instead of the
         promoted_mode used for function_arg above.  However, the
         corresponding handling of incoming arguments in function.cc
         does pass the promoted mode.  */
      arg.mode = TYPE_MODE (type);
      targetm.calls.function_arg_advance (args_so_far, arg);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static poly_int64
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  poly_int64 unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (multiple_p (stack_pointer_delta,
                                  preferred_stack_boundary));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
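      /* Round the sum of the pending size and the part already pushed
         (STACK_POINTER_DELTA), then remove the pushed part again, so the
         stack pointer ends up aligned once the block is allocated.  */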
      args_size->constant = (aligned_upper_bound (args_size->constant
                                                  + stack_pointer_delta,
                                                  preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = upper_bound (args_size->constant,
                                         reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
1653
1654/* Precompute parameters as needed for a function call.
1655
1656 FLAGS is mask of ECF_* constants.
1657
1658 NUM_ACTUALS is the number of arguments.
1659
1660 ARGS is an array containing information for each argument; this
1661 routine fills in the INITIAL_VALUE and VALUE fields for each
1662 precomputed argument. */
1663
1664static void
1665precompute_arguments (int num_actuals, struct arg_data *args)
1666{
1667 int i;
1668
1669 /* If this is a libcall, then precompute all arguments so that we do not
1670 get extraneous instructions emitted as part of the libcall sequence. */
1671
1672 /* If we preallocated the stack space, and some arguments must be passed
1673 on the stack, then we must precompute any parameter which contains a
1674 function call which will store arguments on the stack.
1675 Otherwise, evaluating the parameter may clobber previous parameters
1676 which have already been stored into the stack. (we have code to avoid
1677 such case by saving the outgoing stack arguments, but it results in
1678 worse code) */
1679 if (!ACCUMULATE_OUTGOING_ARGS)
1680 return;
1681
1682 for (i = 0; i < num_actuals; i++)
1683 {
1684 tree type;
1685 machine_mode mode;
1686
1687 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1688 continue;
1689
1690 /* If this is an addressable type, we cannot pre-evaluate it. */
1691 type = TREE_TYPE (args[i].tree_value);
1692 gcc_assert (!TREE_ADDRESSABLE (type));
1693
1694 args[i].initial_value = args[i].value
1695 = expand_normal (args[i].tree_value);
1696
1697 mode = TYPE_MODE (type);
1698 if (mode != args[i].mode)
1699 {
1700 int unsignedp = args[i].unsignedp;
1701 args[i].value
1702 = convert_modes (args[i].mode, mode,
1703 args[i].value, args[i].unsignedp);
1704
1705 /* CSE will replace this only if it contains args[i].value
1706 pseudo, so convert it down to the declared mode using
1707 a SUBREG. */
1708 if (REG_P (args[i].value)
1709 && GET_MODE_CLASS (args[i].mode) == MODE_INT
1710 && promote_mode (type, mode, &unsignedp) != args[i].mode)
1711 {
1712 args[i].initial_value
1713 = gen_lowpart_SUBREG (mode, args[i].value);
1714 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1715 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
1716 }
1717 }
1718 }
1719}
1720
1721/* Given the current state of MUST_PREALLOCATE and information about
1722 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1723 compute and return the final value for MUST_PREALLOCATE. */
1724
1725static bool
1726finalize_must_preallocate (bool must_preallocate, int num_actuals,
1727 struct arg_data *args, struct args_size *args_size)
1728{
1729 /* See if we have or want to preallocate stack space.
1730
1731 If we would have to push a partially-in-regs parm
1732 before other stack parms, preallocate stack space instead.
1733
1734 If the size of some parm is not a multiple of the required stack
1735 alignment, we must preallocate.
1736
1737 If the total size of arguments that would otherwise create a copy in
1738 a temporary (such as a CALL) is more than half the total argument list
1739 size, preallocation is faster.
1740
1741 Another reason to preallocate is if we have a machine (like the m88k)
1742 where stack alignment is required to be maintained between every
1743 pair of insns, not just when the call is made. However, we assume here
1744 that such machines either do not have push insns (and hence preallocation
1745 would occur anyway) or the problem is taken care of with
1746 PUSH_ROUNDING. */
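/* A hypothetical illustration of the "more than half" heuristic below:
 if the arguments occupy args_size->constant == 40 bytes and the BLKmode
 arguments needing temporaries (CALL_EXPRs etc.) total
 copy_to_evaluate_size == 24 bytes, then 24 * 2 >= 40 and we choose to
 preallocate. */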
1747
1748 if (! must_preallocate)
1749 {
1750 bool partial_seen = false;
1751 poly_int64 copy_to_evaluate_size = 0;
1752 int i;
1753
1754 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1755 {
1756 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1757 partial_seen = true;
1758 else if (partial_seen && args[i].reg == 0)
1759 must_preallocate = true;
1760
1761 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1762 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1763 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1764 || TREE_CODE (args[i].tree_value) == COND_EXPR
1765 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1766 copy_to_evaluate_size
1767 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1768 }
1769
1770 if (maybe_ne (args_size->constant, 0)
1771 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
1772 must_preallocate = true;
1773 }
1774 return must_preallocate;
1775}
1776
1777/* If we preallocated stack space, compute the address of each argument
1778 and store it into the ARGS array.
1779
1780 We need not ensure it is a valid memory address here; it will be
1781 validized when it is used.
1782
1783 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1784
1785static void
1786compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1787{
1788 if (argblock)
1789 {
1790 rtx arg_reg = argblock;
1791 int i;
1792 poly_int64 arg_offset = 0;
1793
1794 if (GET_CODE (argblock) == PLUS)
1795 {
1796 arg_reg = XEXP (argblock, 0);
1797 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
1798 }
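 /* For example (hypothetical values): if ARGBLOCK is
 (plus (reg R) (const_int 16)), ARG_REG is R and ARG_OFFSET is 16, so
 each argument address below becomes R + 16 + its locate.offset. */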
1799
1800 for (i = 0; i < num_actuals; i++)
1801 {
1802 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1803 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1804 rtx addr;
1805 unsigned int align, boundary;
1806 poly_uint64 units_on_stack = 0;
1807 machine_mode partial_mode = VOIDmode;
1808
1809 /* Skip this parm if it will not be passed on the stack. */
1810 if (! args[i].pass_on_stack
1811 && args[i].reg != 0
1812 && args[i].partial == 0)
1813 continue;
1814
1815 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
1816 continue;
1817
1818 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
1819 addr = plus_constant (Pmode, addr, arg_offset);
1820
1821 if (args[i].partial != 0)
1822 {
1823 /* Only part of the parameter is being passed on the stack.
1824 Generate a simple memory reference of the correct size. */
1825 units_on_stack = args[i].locate.size.constant;
1826 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
1827 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
1828 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1829 set_mem_size (args[i].stack, units_on_stack);
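 /* Sketch of the mode choice above (typical target, not guaranteed):
 units_on_stack == 4 gives bits_on_stack == 32 and partial_mode ==
 SImode, while a 12-byte remainder has no integer mode of that exact
 size, so else_blk () falls back to BLKmode. */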
1830 }
1831 else
1832 {
1833 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1834 set_mem_attributes (args[i].stack,
1835 TREE_TYPE (args[i].tree_value), 1);
1836 }
1837 align = BITS_PER_UNIT;
1838 boundary = args[i].locate.boundary;
1839 poly_int64 offset_val;
1840 if (args[i].locate.where_pad != PAD_DOWNWARD)
1841 align = boundary;
1842 else if (poly_int_rtx_p (offset, &offset_val))
1843 {
1844 align = least_bit_hwi (boundary);
1845 unsigned int offset_align
1846 = known_alignment (offset_val) * BITS_PER_UNIT;
1847 if (offset_align != 0)
1848 align = MIN (align, offset_align);
1849 }
1850 set_mem_align (args[i].stack, align);
1851
1852 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
1853 addr = plus_constant (Pmode, addr, arg_offset);
1854
1855 if (args[i].partial != 0)
1856 {
1857 /* Only part of the parameter is being passed on the stack.
1858 Generate a simple memory reference of the correct size. */
1859
1860 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1861 set_mem_size (args[i].stack_slot, units_on_stack);
1862 }
1863 else
1864 {
1865 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1866 set_mem_attributes (args[i].stack_slot,
1867 TREE_TYPE (args[i].tree_value), 1);
1868 }
1869 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1870
1871 /* Function incoming arguments may overlap with sibling call
1872 outgoing arguments and we cannot allow reordering of reads
1873 from function arguments with stores to outgoing arguments
1874 of sibling calls. */
1875 set_mem_alias_set (args[i].stack, 0);
1876 set_mem_alias_set (args[i].stack_slot, 0);
1877 }
1878 }
1879}
1880
1881/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1882 in a call instruction.
1883
1884 FNDECL is the tree node for the target function. For an indirect call
1885 FNDECL will be NULL_TREE.
1886
1887 ADDR is the operand 0 of CALL_EXPR for this call. */
1888
1889static rtx
1890rtx_for_function_call (tree fndecl, tree addr)
1891{
1892 rtx funexp;
1893
1894 /* Get the function to call, in the form of RTL. */
1895 if (fndecl)
1896 {
1897 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1898 TREE_USED (fndecl) = 1;
1899
1900 /* Get a SYMBOL_REF rtx for the function address. */
1901 funexp = XEXP (DECL_RTL (fndecl), 0);
1902 }
1903 else
1904 /* Generate an rtx (probably a pseudo-register) for the address. */
1905 {
1906 push_temp_slots ();
1907 funexp = expand_normal (addr);
1908 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1909 }
1910 return funexp;
1911}
1912
1913/* Return the static chain for this function, if any. */
1914
1915rtx
1916rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
1917{
1918 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
1919 return NULL;
1920
1921 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
1922}
1923
1924/* Internal state for internal_arg_pointer_based_exp and its helpers. */
1925static struct
1926{
1927 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
1928 or NULL_RTX if none has been scanned yet. */
1929 rtx_insn *scan_start;
1930 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
1931 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
1932 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
1933 with fixed offset, or PC if this is with variable or unknown offset. */
1934 vec<rtx> cache;
1935} internal_arg_pointer_exp_state;
1936
1937static rtx internal_arg_pointer_based_exp (const_rtx, bool);
1938
1939 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
1940 the tail call sequence, starting with the first insn that hasn't been
1941 scanned yet, and note for each pseudo on the LHS whether it is based
1942 on crtl->args.internal_arg_pointer or not, and if so, what offset
1943 from that pointer it has. */
1944
1945static void
1946internal_arg_pointer_based_exp_scan (void)
1947{
1948 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
1949
1950 if (scan_start == NULL_RTX)
1951 insn = get_insns ();
1952 else
1953 insn = NEXT_INSN (scan_start);
1954
1955 while (insn)
1956 {
1957 rtx set = single_set (insn);
1958 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
1959 {
1960 rtx val = NULL_RTX;
1961 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
1962 /* Punt on pseudos set multiple times. */
1963 if (idx < internal_arg_pointer_exp_state.cache.length ()
1964 && (internal_arg_pointer_exp_state.cache[idx]
1965 != NULL_RTX))
1966 val = pc_rtx;
1967 else
1968 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
1969 if (val != NULL_RTX)
1970 {
1971 if (idx >= internal_arg_pointer_exp_state.cache.length ())
1972 internal_arg_pointer_exp_state.cache
1973 .safe_grow_cleared (idx + 1, true);
1974 internal_arg_pointer_exp_state.cache[idx] = val;
1975 }
1976 }
1977 if (NEXT_INSN (insn) == NULL_RTX)
1978 scan_start = insn;
1979 insn = NEXT_INSN (insn);
1980 }
1981
1982 internal_arg_pointer_exp_state.scan_start = scan_start;
1983}
1984
1985/* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
1986 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
1987 it with fixed offset, or PC if this is with variable or unknown offset.
1988 TOPLEVEL is true if the function is invoked at the topmost level. */
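/* Worked examples (illustrative): the internal arg pointer itself yields
 (const_int 0); (plus internal_arg_pointer (const_int 12)) yields
 (const_int 12); a pseudo whose offset from the pointer is variable or
 unknown yields PC; an unrelated expression yields NULL_RTX. */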
1989
1990static rtx
1991internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
1992{
1993 if (CONSTANT_P (rtl))
1994 return NULL_RTX;
1995
1996 if (rtl == crtl->args.internal_arg_pointer)
1997 return const0_rtx;
1998
1999 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2000 return NULL_RTX;
2001
2002 poly_int64 offset;
2003 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
2004 {
2005 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2006 if (val == NULL_RTX || val == pc_rtx)
2007 return val;
2008 return plus_constant (Pmode, val, offset);
2009 }
2010
2011 /* When called at the topmost level, scan pseudo assignments in between the
2012 last scanned instruction in the tail call sequence and the latest insn
2013 in that sequence. */
2014 if (toplevel)
2015 internal_arg_pointer_based_exp_scan ();
2016
2017 if (REG_P (rtl))
2018 {
2019 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
2020 if (idx < internal_arg_pointer_exp_state.cache.length ())
2021 return internal_arg_pointer_exp_state.cache[idx];
2022
2023 return NULL_RTX;
2024 }
2025
2026 subrtx_iterator::array_type array;
2027 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2028 {
2029 const_rtx x = *iter;
2030 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2031 return pc_rtx;
2032 if (MEM_P (x))
2033 iter.skip_subrtxes ();
2034 }
2035
2036 return NULL_RTX;
2037}
2038
2039/* Return true if SIZE bytes starting from address ADDR might overlap an
2040 already-clobbered argument area. This function is used to determine
2041 if we should give up a sibcall. */
2042
2043static bool
2044mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
2045{
2046 poly_int64 i;
2047 unsigned HOST_WIDE_INT start, end;
2048 rtx val;
2049
2050 if (bitmap_empty_p (stored_args_map)
2051 && stored_args_watermark == HOST_WIDE_INT_M1U)
2052 return false;
2053 val = internal_arg_pointer_based_exp (addr, true);
2054 if (val == NULL_RTX)
2055 return false;
2056 else if (!poly_int_rtx_p (val, &i))
2057 return true;
2058
2059 if (known_eq (size, 0U))
2060 return false;
2061
2062 if (STACK_GROWS_DOWNWARD)
2063 i -= crtl->args.pretend_args_size;
2064 else
2065 i += crtl->args.pretend_args_size;
2066
2067 if (ARGS_GROW_DOWNWARD)
2068 i = -i - size;
2069
2070 /* We can ignore any references to the function's pretend args,
2071 which at this point would manifest as negative values of I. */
2072 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
2073 return false;
2074
2075 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
2076 if (!(i + size).is_constant (&end))
2077 end = HOST_WIDE_INT_M1U;
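 /* Example with made-up numbers: VAL == 24, pretend_args_size == 16 and
 SIZE == 8 on a STACK_GROWS_DOWNWARD, !ARGS_GROW_DOWNWARD target give
 I == 8, so START == 8, END == 16, and bytes [8, 16) of the stored
 argument area are checked below. */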
2078
2079 if (end > stored_args_watermark)
2080 return true;
2081
2082 end = MIN (end, SBITMAP_SIZE (stored_args_map));
2083 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
2084 if (bitmap_bit_p (stored_args_map, k))
2085 return true;
2086
2087 return false;
2088}
2089
2090/* Do the register loads required for any wholly-register parms or any
2091 parms which are passed both on the stack and in a register. Their
2092 expressions were already evaluated.
2093
2094 Mark all register-parms as living through the call, putting these USE
2095 insns in the CALL_INSN_FUNCTION_USAGE field.
2096
2097 When IS_SIBCALL, perform the check_sibcall_argument_overlap
2098 checking, setting *SIBCALL_FAILURE if appropriate. */
2099
2100static void
2101load_register_parameters (struct arg_data *args, int num_actuals,
2102 rtx *call_fusage, int flags, int is_sibcall,
2103 bool *sibcall_failure)
2104{
2105 int i, j;
2106
2107 for (i = 0; i < num_actuals; i++)
2108 {
2109 rtx reg = ((flags & ECF_SIBCALL)
2110 ? args[i].tail_call_reg : args[i].reg);
2111 if (reg)
2112 {
2113 int partial = args[i].partial;
2114 int nregs;
2115 poly_int64 size = 0;
2116 HOST_WIDE_INT const_size = 0;
2117 rtx_insn *before_arg = get_last_insn ();
2118 tree tree_value = args[i].tree_value;
2119 tree type = TREE_TYPE (tree_value);
2120 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
2121 type = TREE_TYPE (first_field (type));
2122 /* Set non-negative if we must move a word at a time, even if
2123 just one word (e.g., partial == 4 && mode == DFmode). Set
2124 to -1 if we just use a normal move insn. This value can be
2125 zero if the argument is a zero size structure. */
2126 nregs = -1;
2127 if (GET_CODE (reg) == PARALLEL)
2128 ;
2129 else if (partial)
2130 {
2131 gcc_assert (partial % UNITS_PER_WORD == 0);
2132 nregs = partial / UNITS_PER_WORD;
2133 }
2134 else if (TYPE_MODE (type) == BLKmode)
2135 {
2136 /* Variable-sized parameters should be described by a
2137 PARALLEL instead. */
2138 const_size = int_size_in_bytes (type);
2139 gcc_assert (const_size >= 0);
2140 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2141 size = const_size;
2142 }
2143 else
2144 size = GET_MODE_SIZE (args[i].mode);
2145
2146 /* Handle calls that pass values in multiple non-contiguous
2147 locations. The Irix 6 ABI has examples of this. */
2148
2149 if (GET_CODE (reg) == PARALLEL)
2150 emit_group_move (reg, args[i].parallel_value);
2151
2152 /* If simple case, just do move. If normal partial, store_one_arg
2153 has already loaded the register for us. In all other cases,
2154 load the register(s) from memory. */
2155
2156 else if (nregs == -1)
2157 {
2158 emit_move_insn (reg, args[i].value);
2159#ifdef BLOCK_REG_PADDING
2160 /* Handle the case where we have a value that needs shifting
2161 up to the msb, e.g. a QImode value that we're padding
2162 upward on a BYTES_BIG_ENDIAN machine. */
2163 if (args[i].locate.where_pad
2164 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
2165 {
2166 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
2167 if (maybe_lt (size, UNITS_PER_WORD))
2168 {
2169 rtx x;
2170 poly_int64 shift
2171 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
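 /* E.g. (illustrative): a 1-byte QImode argument with
 UNITS_PER_WORD == 8 gives SHIFT == (8 - 1) * BITS_PER_UNIT == 56,
 moving the value into the most significant byte of the word. */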
2172
2173 /* Assigning REG here rather than a temp makes
2174 CALL_FUSAGE report the whole reg as used.
2175 Strictly speaking, the call only uses SIZE
2176 bytes at the msb end, but it doesn't seem worth
2177 generating rtl to say that. */
2178 reg = gen_rtx_REG (word_mode, REGNO (reg));
2179 x = expand_shift (LSHIFT_EXPR, word_mode,
2180 reg, shift, reg, 1);
2181 if (x != reg)
2182 emit_move_insn (reg, x);
2183 }
2184 }
2185#endif
2186 }
2187
2188 /* If we have pre-computed the values to put in the registers in
2189 the case of non-aligned structures, copy them in now. */
2190
2191 else if (args[i].n_aligned_regs != 0)
2192 for (j = 0; j < args[i].n_aligned_regs; j++)
2193 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2194 args[i].aligned_regs[j]);
2195
2196 /* If we need a single register and the source is a constant
2197 VAR_DECL with a simple constructor, expand that constructor
2198 via a pseudo rather than read from (possibly misaligned)
2199 memory. PR middle-end/95126. */
2200 else if (nregs == 1
2201 && partial == 0
2202 && !args[i].pass_on_stack
2203 && VAR_P (tree_value)
2204 && TREE_READONLY (tree_value)
2205 && !TREE_SIDE_EFFECTS (tree_value)
2206 && immediate_const_ctor_p (DECL_INITIAL (tree_value)))
2207 {
2208 rtx target = gen_reg_rtx (word_mode);
2209 store_constructor (DECL_INITIAL (tree_value), target, 0,
2210 int_expr_size (DECL_INITIAL (tree_value)),
2211 false);
2212 reg = gen_rtx_REG (word_mode, REGNO (reg));
2213 emit_move_insn (reg, target);
2214 }
2215 else if (partial == 0 || args[i].pass_on_stack)
2216 {
2217 /* SIZE and CONST_SIZE are 0 for partial arguments and
2218 the size of a BLKmode type otherwise. */
2219 gcc_checking_assert (known_eq (size, const_size));
2220 rtx mem = validize_mem (copy_rtx (args[i].value));
2221
2222 /* Check for overlap with already clobbered argument area,
2223 providing that this has non-zero size. */
2224 if (is_sibcall
2225 && const_size != 0
2226 && (mem_might_overlap_already_clobbered_arg_p
2227 (XEXP (args[i].value, 0), const_size)))
2228 *sibcall_failure = true;
2229
2230 if (const_size % UNITS_PER_WORD == 0
2231 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2232 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2233 else
2234 {
2235 if (nregs > 1)
2236 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2237 args[i].mode);
2238 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2239 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2240 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
2241 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2242 word_mode, word_mode, false,
2243 NULL);
2244 if (BYTES_BIG_ENDIAN)
2245 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2246 BITS_PER_WORD - bitsize, dest, 1);
2247 if (x != dest)
2248 emit_move_insn (dest, x);
2249 }
2250
2251 /* Handle a BLKmode that needs shifting. */
2252 if (nregs == 1 && const_size < UNITS_PER_WORD
2253#ifdef BLOCK_REG_PADDING
2254 && args[i].locate.where_pad == PAD_DOWNWARD
2255#else
2256 && BYTES_BIG_ENDIAN
2257#endif
2258 )
2259 {
2260 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2261 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
2262 enum tree_code dir = (BYTES_BIG_ENDIAN
2263 ? RSHIFT_EXPR : LSHIFT_EXPR);
2264 rtx x;
2265
2266 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2267 if (x != dest)
2268 emit_move_insn (dest, x);
2269 }
2270 }
2271
2272 /* When a parameter is a block, and perhaps in other cases, it is
2273 possible that it did a load from an argument slot that was
2274 already clobbered. */
2275 if (is_sibcall
2276 && check_sibcall_argument_overlap (before_arg, &args[i], false))
2277 *sibcall_failure = true;
2278
2279 /* Handle calls that pass values in multiple non-contiguous
2280 locations. The Irix 6 ABI has examples of this. */
2281 if (GET_CODE (reg) == PARALLEL)
2282 use_group_regs (call_fusage, reg);
2283 else if (nregs == -1)
2284 use_reg_mode (call_fusage, reg, TYPE_MODE (type));
2285 else if (nregs > 0)
2286 use_regs (call_fusage, REGNO (reg), nregs);
2287 }
2288 }
2289}
2290
2291/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2292 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2293 bytes, then we would need to push some additional bytes to pad the
2294 arguments. So, we try to compute an adjustment to the stack pointer for an
2295 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2296 bytes. Then, when the arguments are pushed the stack will be perfectly
2297 aligned.
2298
2299 Return true if this optimization is possible, storing the adjustment
2300 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2301 bytes that should be popped after the call. */
2302
2303static bool
2304combine_pending_stack_adjustment_and_call (poly_int64 *adjustment_out,
2305 poly_int64 unadjusted_args_size,
2306 struct args_size *args_size,
2307 unsigned int preferred_unit_stack_boundary)
2308{
2309 /* The number of bytes to pop so that the stack will be
2310 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2311 poly_int64 adjustment;
2312 /* The alignment of the stack after the arguments are pushed, if we
2313 just pushed the arguments without adjusting the stack here. */
2314 unsigned HOST_WIDE_INT unadjusted_alignment;
2315
2316 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
2317 preferred_unit_stack_boundary,
2318 &unadjusted_alignment))
2319 return false;
2320
2321 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2322 as possible -- leaving just enough left to cancel out the
2323 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2324 PENDING_STACK_ADJUST is non-negative, and congruent to
2325 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2326
2327 /* Begin by trying to pop all the bytes. */
2328 unsigned HOST_WIDE_INT tmp_misalignment;
2329 if (!known_misalignment (pending_stack_adjust,
2330 preferred_unit_stack_boundary,
2331 &tmp_misalignment))
2332 return false;
2333 unadjusted_alignment -= tmp_misalignment;
2334 adjustment = pending_stack_adjust;
2335 /* Push enough additional bytes that the stack will be aligned
2336 after the arguments are pushed. */
2337 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2338 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
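 /* Worked example (hypothetical numbers): with a 16-byte boundary,
 (stack_pointer_delta + unadjusted_args_size) % 16 == 12 and
 pending_stack_adjust == 36 (and 36 % 16 == 4), UNADJUSTED_ALIGNMENT
 becomes 12 - 4 == 8 and ADJUSTMENT == 36 - (16 - 8) == 28; popping 28
 bytes and then pushing the arguments leaves the stack 16-byte aligned,
 since 12 - 28 is a multiple of 16. */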
2339
2340 /* We need to know whether the adjusted argument size
2341 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2342 or a deallocation. */
2343 if (!ordered_p (adjustment, unadjusted_args_size))
2344 return false;
2345
2346 /* Now set ARGS_SIZE->CONSTANT so that we pop the right number of
2347 bytes after the call. The right number is the entire
2348 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2349 by the arguments in the first place. */
2350 args_size->constant
2351 = pending_stack_adjust - adjustment + unadjusted_args_size;
2352
2353 *adjustment_out = adjustment;
2354 return true;
2355}
2356
2357 /* Scan expression X to see whether it dereferences any argument slots
2358 we have already clobbered with tail call arguments (as noted in the
2359 stored_args_map bitmap).
2360 Return true if X dereferences such an argument slot,
2361 false otherwise. */
2362
2363static bool
2364check_sibcall_argument_overlap_1 (rtx x)
2365{
2366 RTX_CODE code;
2367 int i, j;
2368 const char *fmt;
2369
2370 if (x == NULL_RTX)
2371 return false;
2372
2373 code = GET_CODE (x);
2374
2375 /* We need not check the operands of the CALL expression itself. */
2376 if (code == CALL)
2377 return false;
2378
2379 if (code == MEM)
2380 return (mem_might_overlap_already_clobbered_arg_p
2381 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
2382
2383 /* Scan all subexpressions. */
2384 fmt = GET_RTX_FORMAT (code);
2385 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2386 {
2387 if (*fmt == 'e')
2388 {
2389 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2390 return true;
2391 }
2392 else if (*fmt == 'E')
2393 {
2394 for (j = 0; j < XVECLEN (x, i); j++)
2395 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2396 return true;
2397 }
2398 }
2399 return false;
2400}
2401
2402 /* Scan the sequence after INSN to see whether it dereferences any
2403 argument slots we have already clobbered with tail call arguments (as
2404 noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the
2405 stack slots for ARG to the stored_args_map bitmap afterwards (when ARG
2406 is a register MARK_STORED_ARGS_MAP should be false). Return true if the
2407 sequence after INSN dereferences such argument slots, false otherwise. */
2408
2409static bool
2410check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
2411 bool mark_stored_args_map)
2412{
2413 poly_uint64 low, high;
2414 unsigned HOST_WIDE_INT const_low, const_high;
2415
2416 if (insn == NULL_RTX)
2417 insn = get_insns ();
2418 else
2419 insn = NEXT_INSN (insn);
2420
2421 for (; insn; insn = NEXT_INSN (insn))
2422 if (INSN_P (insn)
2423 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2424 break;
2425
2426 if (mark_stored_args_map)
2427 {
2428 if (ARGS_GROW_DOWNWARD)
2429 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2430 else
2431 low = arg->locate.slot_offset.constant;
2432 high = low + arg->locate.size.constant;
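 /* For instance (not from the source): with upward-growing arguments,
 slot_offset.constant == 16 and size.constant == 8 give LOW == 16 and
 HIGH == 24, so bits 16..23 of stored_args_map are set below. */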
2433
2434 const_low = constant_lower_bound (low);
2435 if (high.is_constant (&const_high))
2436 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
2437 bitmap_set_bit (stored_args_map, i);
2438 else
2439 stored_args_watermark = MIN (stored_args_watermark, const_low);
2440 }
2441 return insn != NULL_RTX;
2442}
2443
2444/* Given that a function returns a value of mode MODE at the most
2445 significant end of hard register VALUE, shift VALUE left or right
2446 as specified by LEFT_P. Return true if some action was needed. */
2447
2448bool
2449shift_return_value (machine_mode mode, bool left_p, rtx value)
2450{
2451 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2452 machine_mode value_mode = GET_MODE (value);
2453 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
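 /* E.g. (illustrative): an SImode value returned at the most significant
 end of a DImode hard register gives SHIFT == 64 - 32 == 32; with LEFT_P
 false, the arithmetic right shift also preserves the MIPS-style
 sign-extension invariant described in the comment below. */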
2454
2455 if (known_eq (shift, 0))
2456 return false;
2457
2458 /* Use ashr rather than lshr for right shifts. This is for the benefit
2459 of the MIPS port, which requires SImode values to be sign-extended
2460 when stored in 64-bit registers. */
2461 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
2462 value, gen_int_shift_amount (value_mode, shift),
2463 value, 1, OPTAB_WIDEN))
2464 gcc_unreachable ();
2465 return true;
2466}
2467
2468/* If X is a likely-spilled register value, copy it to a pseudo
2469 register and return that register. Return X otherwise. */
2470
2471static rtx
2472avoid_likely_spilled_reg (rtx x)
2473{
2474 rtx new_rtx;
2475
2476 if (REG_P (x)
2477 && HARD_REGISTER_P (x)
2478 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
2479 {
2480 /* Make sure that we generate a REG rather than a CONCAT.
2481 Moves into CONCATs can need nontrivial instructions,
2482 and the whole point of this function is to avoid
2483 using the hard register directly in such a situation. */
2484 generating_concat_p = 0;
2485 new_rtx = gen_reg_rtx (GET_MODE (x));
2486 generating_concat_p = 1;
2487 emit_move_insn (new_rtx, x);
2488 return new_rtx;
2489 }
2490 return x;
2491}
2492
2493/* Helper function for expand_call.
2494 Return false if EXP is not implementable as a sibling call. */
2495
2496static bool
2497can_implement_as_sibling_call_p (tree exp,
2498 rtx structure_value_addr,
2499 tree funtype,
2500 tree fndecl,
2501 int flags,
2502 tree addr,
2503 const args_size &args_size)
2504{
2505 if (!targetm.have_sibcall_epilogue ()
2506 && !targetm.emit_epilogue_for_sibcall)
2507 {
2508 maybe_complain_about_tail_call
2509 (exp,
2510 reason: "machine description does not have"
2511 " a sibcall_epilogue instruction pattern");
2512 return false;
2513 }
2514
2515 /* Doing sibling call optimization needs some work, since
2516 structure_value_addr can be allocated on the stack.
2517 It does not seem worth the effort since few optimizable
2518 sibling calls will return a structure. */
2519 if (structure_value_addr != NULL_RTX)
2520 {
2521 maybe_complain_about_tail_call (exp, "callee returns a structure");
2522 return false;
2523 }
2524
2525 /* Check whether the target is able to optimize the call
2526 into a sibcall. */
2527 if (!targetm.function_ok_for_sibcall (fndecl, exp))
2528 {
2529 maybe_complain_about_tail_call (exp,
2530 "target is not able to optimize the"
2531 " call into a sibling call");
2532 return false;
2533 }
2534
2535 /* Functions that do not return exactly once may not be sibcall
2536 optimized. */
2537 if (flags & ECF_RETURNS_TWICE)
2538 {
2539 maybe_complain_about_tail_call (exp, "callee returns twice");
2540 return false;
2541 }
2542 if (flags & ECF_NORETURN)
2543 {
2544 maybe_complain_about_tail_call (exp, "callee does not return");
2545 return false;
2546 }
2547
2548 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
2549 {
2550 maybe_complain_about_tail_call (exp, "volatile function type");
2551 return false;
2552 }
2553
2554 /* __sanitizer_cov_trace_pc is supposed to inspect its return address
2555 to identify the caller, and therefore should not be tailcalled. */
2556 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2557 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SANITIZER_COV_TRACE_PC)
2558 {
2559 /* No need for maybe_complain_about_tail_call here:
2560 the call is synthesized by the compiler. */
2561 return false;
2562 }
2563
2564 /* If the called function is nested in the current one, it might access
2565 some of the caller's arguments, but could clobber them beforehand if
2566 the argument areas are shared. */
2567 if (fndecl && decl_function_context (fndecl) == current_function_decl)
2568 {
2569 maybe_complain_about_tail_call (exp, "nested function");
2570 return false;
2571 }
2572
2573 /* If this function requires more stack slots than the current
2574 function, we cannot change it into a sibling call.
2575 crtl->args.pretend_args_size is not part of the
2576 stack allocated by our caller. */
2577 if (maybe_gt (args_size.constant,
2578 crtl->args.size - crtl->args.pretend_args_size))
2579 {
2580 maybe_complain_about_tail_call (exp,
2581 "callee required more stack slots"
2582 " than the caller");
2583 return false;
2584 }
2585
2586 /* If the callee pops its own arguments, then it must pop exactly
2587 the same number of arguments as the current function. */
2588 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
2589 args_size.constant),
2590 targetm.calls.return_pops_args (current_function_decl,
2591 TREE_TYPE
2592 (current_function_decl),
2593 crtl->args.size)))
2594 {
2595 maybe_complain_about_tail_call (exp,
2596 "inconsistent number of"
2597 " popped arguments");
2598 return false;
2599 }
2600
2601 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
2602 {
2603 maybe_complain_about_tail_call (exp, "frontend does not support"
2604 " sibling call");
2605 return false;
2606 }
2607
2608 /* All checks passed. */
2609 return true;
2610}
2611
2612/* Update stack alignment when the parameter is passed in the stack
2613 since the outgoing parameter requires extra alignment on the calling
2614 function side. */
2615
2616static void
2617update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
2618{
2619 if (crtl->stack_alignment_needed < locate->boundary)
2620 crtl->stack_alignment_needed = locate->boundary;
2621 if (crtl->preferred_stack_boundary < locate->boundary)
2622 crtl->preferred_stack_boundary = locate->boundary;
2623}
2624
2625/* Generate all the code for a CALL_EXPR exp
2626 and return an rtx for its value.
2627 Store the value in TARGET (specified as an rtx) if convenient.
2628 If the value is stored in TARGET then TARGET is returned.
2629 If IGNORE is nonzero, then we ignore the value of the function call. */
2630
2631rtx
2632expand_call (tree exp, rtx target, int ignore)
2633{
2634 /* Nonzero if we are currently expanding a call. */
2635 static int currently_expanding_call = 0;
2636
2637 /* RTX for the function to be called. */
2638 rtx funexp;
2639 /* Sequence of insns to perform a normal "call". */
2640 rtx_insn *normal_call_insns = NULL;
2641 /* Sequence of insns to perform a tail "call". */
2642 rtx_insn *tail_call_insns = NULL;
2643 /* Data type of the function. */
2644 tree funtype;
2645 tree type_arg_types;
2646 tree rettype;
2647 /* Declaration of the function being called,
2648 or 0 if the function is computed (not known by name). */
2649 tree fndecl = 0;
2650 /* The type of the function being called. */
2651 tree fntype;
2652 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
2653 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
2654 int pass;
2655
2656 /* Register in which non-BLKmode value will be returned,
2657 or 0 if no value or if value is BLKmode. */
2658 rtx valreg;
2659 /* Address where we should return a BLKmode value;
2660 0 if value not BLKmode. */
2661 rtx structure_value_addr = 0;
2662 /* Nonzero if that address is being passed by treating it as
2663 an extra, implicit first parameter. Otherwise,
2664 it is passed by being copied directly into struct_value_rtx. */
2665 int structure_value_addr_parm = 0;
2666 /* Holds the value of implicit argument for the struct value. */
2667 tree structure_value_addr_value = NULL_TREE;
2668 /* Size of aggregate value wanted, or zero if none wanted
2669 or if we are using the non-reentrant PCC calling convention
2670 or expecting the value in registers. */
2671 poly_int64 struct_value_size = 0;
2672 /* True if called function returns an aggregate in memory PCC style,
2673 by returning the address of where to find it. */
2674 bool pcc_struct_value = false;
2675 rtx struct_value = 0;
2676
2677 /* Number of actual parameters in this call, including struct value addr. */
2678 int num_actuals;
2679 /* Number of named args. Args after this are anonymous ones
2680 and they must all go on the stack. */
2681 int n_named_args;
2682 /* Number of complex actual arguments that need to be split. */
2683 int num_complex_actuals = 0;
2684
2685 /* Vector of information about each argument.
2686 Arguments are numbered in the order they will be pushed,
2687 not the order they are written. */
2688 struct arg_data *args;
2689
2690 /* Total size in bytes of all the stack-parms scanned so far. */
2691 struct args_size args_size;
2692 struct args_size adjusted_args_size;
2693 /* Size of arguments before any adjustments (such as rounding). */
2694 poly_int64 unadjusted_args_size;
2695 /* Data on reg parms scanned so far. */
2696 CUMULATIVE_ARGS args_so_far_v;
2697 cumulative_args_t args_so_far;
2698 /* Nonzero if a reg parm has been scanned. */
2699 int reg_parm_seen;
2700
2701 /* True if we must avoid push-insns in the args for this call.
2702 If stack space is allocated for register parameters, but not by the
2703 caller, then it is preallocated in the fixed part of the stack frame.
2704 So the entire argument block must then be preallocated (i.e., we
2705 ignore PUSH_ROUNDING in that case). */
2706 bool must_preallocate = !targetm.calls.push_argument (0);
2707
2708 /* Size of the stack reserved for parameter registers. */
2709 int reg_parm_stack_space = 0;
2710
2711 /* Address of space preallocated for stack parms
2712 (on machines that lack push insns), or 0 if space not preallocated. */
2713 rtx argblock = 0;
2714
2715 /* Mask of ECF_ and ERF_ flags. */
2716 int flags = 0;
2717 int return_flags = 0;
2718#ifdef REG_PARM_STACK_SPACE
2719 /* Define the boundary of the register parm stack space that needs to be
2720 saved, if any. */
2721 int low_to_save, high_to_save;
2722 rtx save_area = 0; /* Place that it is saved */
2723#endif
2724
2725 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2726 char *initial_stack_usage_map = stack_usage_map;
2727 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
2728 char *stack_usage_map_buf = NULL;
2729
2730 poly_int64 old_stack_allocated;
2731
2732 /* State variables to track stack modifications. */
2733 rtx old_stack_level = 0;
2734 int old_stack_arg_under_construction = 0;
2735 poly_int64 old_pending_adj = 0;
2736 int old_inhibit_defer_pop = inhibit_defer_pop;
2737
2738 /* Some stack pointer alterations we make are performed via
2739 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2740 which we then also need to save/restore along the way. */
2741 poly_int64 old_stack_pointer_delta = 0;
2742
2743 rtx call_fusage;
2744 tree addr = CALL_EXPR_FN (exp);
2745 int i;
2746 /* The alignment of the stack, in bits. */
2747 unsigned HOST_WIDE_INT preferred_stack_boundary;
2748 /* The alignment of the stack, in bytes. */
2749 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2750 /* The static chain value to use for this call. */
2751 rtx static_chain_value;
2752 /* See if this is "nothrow" function call. */
2753 if (TREE_NOTHROW (exp))
2754 flags |= ECF_NOTHROW;
2755
2756 /* See if we can find a DECL-node for the actual function, and get the
2757 function attributes (flags) from the function decl or type node. */
2758 fndecl = get_callee_fndecl (exp);
2759 if (fndecl)
2760 {
2761 fntype = TREE_TYPE (fndecl);
2762 flags |= flags_from_decl_or_type (fndecl);
2763 return_flags |= decl_return_flags (fndecl);
2764 }
2765 else
2766 {
2767 fntype = TREE_TYPE (TREE_TYPE (addr));
2768 flags |= flags_from_decl_or_type (fntype);
2769 if (CALL_EXPR_BY_DESCRIPTOR (exp))
2770 flags |= ECF_BY_DESCRIPTOR;
2771 }
2772 rettype = TREE_TYPE (exp);
2773
2774 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2775
2776 /* Warn if this value is an aggregate type,
2777 regardless of which calling convention we are using for it. */
2778 if (AGGREGATE_TYPE_P (rettype))
2779 warning (OPT_Waggregate_return, "function call has aggregate value");
2780
2781 /* If the result of a non-looping pure or const function call is
2782 ignored (or void), and none of its arguments are volatile, we can
2783 avoid expanding the call and just evaluate the arguments for
2784 side-effects. */
2785 if ((flags & (ECF_CONST | ECF_PURE))
2786 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2787 && (flags & ECF_NOTHROW)
2788 && (ignore || target == const0_rtx
2789 || TYPE_MODE (rettype) == VOIDmode))
2790 {
2791 bool volatilep = false;
2792 tree arg;
2793 call_expr_arg_iterator iter;
2794
2795 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2796 if (TREE_THIS_VOLATILE (arg))
2797 {
2798 volatilep = true;
2799 break;
2800 }
2801
2802 if (! volatilep)
2803 {
2804 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2805 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2806 return const0_rtx;
2807 }
2808 }
2809
2810#ifdef REG_PARM_STACK_SPACE
2811 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2812#endif
2813
2814 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2815 && reg_parm_stack_space > 0 && targetm.calls.push_argument (0))
2816 must_preallocate = true;
2817
2818 /* Set up a place to return a structure. */
2819
2820 /* Cater to broken compilers. */
2821 if (aggregate_value_p (exp, fntype))
2822 {
2823 /* This call returns a big structure. */
2824 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2825
2826#ifdef PCC_STATIC_STRUCT_RETURN
2827 {
2828 pcc_struct_value = true;
2829 }
2830#else /* not PCC_STATIC_STRUCT_RETURN */
2831 {
2832 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
2833 struct_value_size = -1;
2834
2835 /* Even if it is semantically safe to use the target as the return
2836 slot, it may be not sufficiently aligned for the return type. */
2837 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
2838 && target
2839 && MEM_P (target)
2840 /* If rettype is addressable, we may not create a temporary.
2841 If target is properly aligned at runtime and the compiler
2842 just doesn't know about it, it will work fine, otherwise it
2843 will be UB. */
2844 && (TREE_ADDRESSABLE (rettype)
2845 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
2846 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
2847 MEM_ALIGN (target)))))
2848 structure_value_addr = XEXP (target, 0);
2849 else
2850 {
2851 /* For variable-sized objects, we must be called with a target
2852 specified. If we were to allocate space on the stack here,
2853 we would have no way of knowing when to free it. */
2854 rtx d = assign_temp (rettype, 1, 1);
2855 structure_value_addr = XEXP (d, 0);
2856 target = 0;
2857 }
2858 }
2859#endif /* not PCC_STATIC_STRUCT_RETURN */
2860 }
2861
2862 /* Figure out the amount to which the stack should be aligned. */
2863 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2864 if (fndecl)
2865 {
2866 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
2867 /* Without automatic stack alignment, we can't increase preferred
2868 stack boundary. With automatic stack alignment, it is
2869 unnecessary since unless we can guarantee that all callers will
2870 align the outgoing stack properly, callee has to align its
2871 stack anyway. */
2872 if (i
2873 && i->preferred_incoming_stack_boundary
2874 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2875 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2876 }
2877
2878 /* Operand 0 is a pointer-to-function; get the type of the function. */
2879 funtype = TREE_TYPE (addr);
2880 gcc_assert (POINTER_TYPE_P (funtype));
2881 funtype = TREE_TYPE (funtype);
2882
2883 /* Count whether there are actual complex arguments that need to be split
2884 into their real and imaginary parts. Munge the type_arg_types
2885 appropriately here as well. */
2886 if (targetm.calls.split_complex_arg)
2887 {
2888 call_expr_arg_iterator iter;
2889 tree arg;
2890 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2891 {
2892 tree type = TREE_TYPE (arg);
2893 if (type && TREE_CODE (type) == COMPLEX_TYPE
2894 && targetm.calls.split_complex_arg (type))
2895 num_complex_actuals++;
2896 }
2897 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2898 }
2899 else
2900 type_arg_types = TYPE_ARG_TYPES (funtype);
2901
2902 if (flags & ECF_MAY_BE_ALLOCA)
2903 cfun->calls_alloca = 1;
2904
2905 /* If struct_value_rtx is 0, it means pass the address
2906 as if it were an extra parameter. Put the argument expression
2907 in structure_value_addr_value. */
2908 if (structure_value_addr && struct_value == 0)
2909 {
2910 /* If structure_value_addr is a REG other than
2911 virtual_outgoing_args_rtx, we can always use it. If it
2912 is not a REG, we must always copy it into a register.
2913 If it is virtual_outgoing_args_rtx, we must copy it to another
2914 register in some cases. */
2915 rtx temp = (!REG_P (structure_value_addr)
2916 || (ACCUMULATE_OUTGOING_ARGS
2917 && stack_arg_under_construction
2918 && structure_value_addr == virtual_outgoing_args_rtx)
2919 ? copy_addr_to_reg (convert_memory_address
2920 (Pmode, structure_value_addr))
2921 : structure_value_addr);
2922
2923 structure_value_addr_value =
2924 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2925 structure_value_addr_parm = 1;
2926 }
2927
2928 /* Count the arguments and set NUM_ACTUALS. */
2929 num_actuals
2930 = call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2931
2932 /* Compute number of named args.
2933 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2934
2935 if (type_arg_types != 0)
2936 n_named_args
2937 = (list_length (type_arg_types)
2938 /* Count the struct value address, if it is passed as a parm. */
2939 + structure_value_addr_parm);
2940 else if (TYPE_NO_NAMED_ARGS_STDARG_P (funtype))
2941 n_named_args = structure_value_addr_parm;
2942 else
2943 /* If we know nothing, treat all args as named. */
2944 n_named_args = num_actuals;
2945
2946 /* Start updating where the next arg would go.
2947
2948 On some machines (such as the PA) indirect calls have a different
2949 calling convention than normal calls. The fourth argument in
2950 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2951 or not. */
2952 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
2953 args_so_far = pack_cumulative_args (&args_so_far_v);
2954
2955 /* Now possibly adjust the number of named args.
2956 Normally, don't include the last named arg if anonymous args follow.
2957 We do include the last named arg if
2958 targetm.calls.strict_argument_naming() returns nonzero.
2959 (If no anonymous args follow, the result of list_length is actually
2960 one too large. This is harmless.)
2961
2962 If targetm.calls.pretend_outgoing_varargs_named() returns
2963 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2964 this machine will be able to place unnamed args that were passed
2965 in registers into the stack. So treat all args as named. This
2966 allows the insns emitting for a specific argument list to be
2967 independent of the function declaration.
2968
2969 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2970 we do not have any reliable way to pass unnamed args in
2971 registers, so we must force them into memory. */
2972
2973 if ((type_arg_types != 0 || TYPE_NO_NAMED_ARGS_STDARG_P (funtype))
2974 && targetm.calls.strict_argument_naming (args_so_far))
2975 ;
2976 else if (type_arg_types != 0
2977 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
2978 /* Don't include the last named arg. */
2979 --n_named_args;
2980 else if (TYPE_NO_NAMED_ARGS_STDARG_P (funtype)
2981 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
2982 n_named_args = 0;
2983 else
2984 /* Treat all args as named. */
2985 n_named_args = num_actuals;
2986
2987 /* Make a vector to hold all the information about each arg. */
2988 args = XCNEWVEC (struct arg_data, num_actuals);
2989
2990 /* Build up entries in the ARGS array, compute the size of the
2991 arguments into ARGS_SIZE, etc. */
2992 initialize_argument_information (num_actuals, args, &args_size,
2993 n_named_args, exp,
2994 structure_value_addr_value, fndecl, fntype,
2995 args_so_far, reg_parm_stack_space,
2996 &old_stack_level, &old_pending_adj,
2997 &must_preallocate, &flags,
2998 &try_tail_call, CALL_FROM_THUNK_P (exp));
2999
3000 if (args_size.var)
3001 must_preallocate = true;
3002
3003 /* Now make final decision about preallocating stack space. */
3004 must_preallocate = finalize_must_preallocate (must_preallocate,
3005 num_actuals, args,
3006 &args_size);
3007
3008 /* If the structure value address will reference the stack pointer, we
3009 must stabilize it. We don't need to do this if we know that we are
3010 not going to adjust the stack pointer in processing this call. */
3011
3012 if (structure_value_addr
3013 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3014 || reg_mentioned_p (virtual_outgoing_args_rtx,
3015 structure_value_addr))
3016 && (args_size.var
3017 || (!ACCUMULATE_OUTGOING_ARGS
3018 && maybe_ne (args_size.constant, 0))))
3019 structure_value_addr = copy_to_reg (structure_value_addr);
3020
3021 /* Tail calls can make things harder to debug, and we've traditionally
3022 pushed these optimizations into -O2. Don't try if we're already
3023 expanding a call, as that means we're an argument. Don't try if
3024 there are cleanups, as we know there's code to follow the call. */
3025 if (currently_expanding_call++ != 0
3026 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
3027 || args_size.var
3028 || dbg_cnt (tail_call) == false)
3029 try_tail_call = 0;
3030
3031 /* Workaround buggy C/C++ wrappers around Fortran routines with
3032 character(len=constant) arguments if the hidden string length arguments
3033 are passed on the stack; if the callers forget to pass those arguments,
3034 attempting to tail call in such routines leads to stack corruption.
3035 Avoid tail calls in functions where at least one such hidden string
3036 length argument is passed (partially or fully) on the stack in the
3037 caller and the callee needs to pass any arguments on the stack.
3038 See PR90329. */
3039 if (try_tail_call && maybe_ne (args_size.constant, 0))
3040 for (tree arg = DECL_ARGUMENTS (current_function_decl);
3041 arg; arg = DECL_CHAIN (arg))
3042 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
3043 {
3044 subrtx_iterator::array_type array;
3045 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
3046 if (MEM_P (*iter))
3047 {
3048 try_tail_call = 0;
3049 break;
3050 }
3051 }
3052
3053 /* If the user has marked the function as requiring tail-call
3054 optimization, attempt it. */
3055 if (must_tail_call)
3056 try_tail_call = 1;
3057
3058 /* Check the remaining reasons for the tail call optimization to fail. */
3059 if (try_tail_call)
3060 try_tail_call = can_implement_as_sibling_call_p (exp,
3061 structure_value_addr,
3062 funtype,
3063 fndecl,
3064 flags, addr, args_size);
3065
3066 /* Check if caller and callee disagree in promotion of function
3067 return value. */
3068 if (try_tail_call)
3069 {
3070 machine_mode caller_mode, caller_promoted_mode;
3071 machine_mode callee_mode, callee_promoted_mode;
3072 int caller_unsignedp, callee_unsignedp;
3073 tree caller_res = DECL_RESULT (current_function_decl);
3074
3075 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3076 caller_mode = DECL_MODE (caller_res);
3077 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3078 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3079 caller_promoted_mode
3080 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3081 &caller_unsignedp,
3082 TREE_TYPE (current_function_decl), 1);
3083 callee_promoted_mode
3084 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3085 &callee_unsignedp,
3086 funtype, 1);
3087 if (caller_mode != VOIDmode
3088 && (caller_promoted_mode != callee_promoted_mode
3089 || ((caller_mode != caller_promoted_mode
3090 || callee_mode != callee_promoted_mode)
3091 && (caller_unsignedp != callee_unsignedp
3092 || partial_subreg_p (caller_mode, callee_mode)))))
3093 {
3094 try_tail_call = 0;
3095 maybe_complain_about_tail_call (exp,
3096 "caller and callee disagree in"
3097 " promotion of function"
3098 " return value");
3099 }
3100 }
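 /* An illustrative case (assuming a target with such promotion rules): if
 caller and callee both return an int promoted to a 64-bit mode, but one
 result type is signed and the other unsigned, the promoted modes match
 while CALLER_UNSIGNEDP != CALLEE_UNSIGNEDP, so the tail call is
 refused above. */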
3101
3102 /* Ensure current function's preferred stack boundary is at least
3103 what we need. Stack alignment may also increase preferred stack
3104 boundary. */
3105 for (i = 0; i < num_actuals; i++)
3106 if (reg_parm_stack_space > 0
3107 || args[i].reg == 0
3108 || args[i].partial != 0
3109 || args[i].pass_on_stack)
3110 update_stack_alignment_for_call (&args[i].locate);
3111 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
3112 crtl->preferred_stack_boundary = preferred_stack_boundary;
3113 else
3114 preferred_stack_boundary = crtl->preferred_stack_boundary;
3115
3116 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
3117
3118 if (flag_callgraph_info)
3119 record_final_call (fndecl, EXPR_LOCATION (exp));
3120
3121 /* We want to make two insn chains; one for a sibling call, the other
3122 for a normal call. We will select one of the two chains after
3123 initial RTL generation is complete. */
3124 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
3125 {
3126 bool sibcall_failure = false;
3127 bool normal_failure = false;
3128 /* We want to emit any pending stack adjustments before the tail
3129 recursion "call". That way we know any adjustment after the tail
3130 recursion call can be ignored if we indeed use the tail
3131 call expansion. */
3132 saved_pending_stack_adjust save;
3133 rtx_insn *insns, *before_call, *after_args;
3134 rtx next_arg_reg;
3135
3136 if (pass == 0)
3137 {
3138 /* State variables we need to save and restore between
3139 iterations. */
3140 save_pending_stack_adjust (&save);
3141 }
3142 if (pass)
3143 flags &= ~ECF_SIBCALL;
3144 else
3145 flags |= ECF_SIBCALL;
3146
3147 /* Other state variables that we must reinitialize each time
3148 through the loop (that are not initialized by the loop itself). */
3149 argblock = 0;
3150 call_fusage = 0;
3151
3152 /* Start a new sequence for the normal call case.
3153
3154 From this point on, if the sibling call fails, we want to set
3155 sibcall_failure instead of continuing the loop. */
3156 start_sequence ();
3157
3158 /* Don't let pending stack adjusts add up to too much.
3159 Also, do all pending adjustments now if there is any chance
3160 this might be a call to alloca or if we are expanding a sibling
3161 call sequence.
3162 Also do the adjustments before a throwing call, otherwise
3163 exception handling can fail; PR 19225. */
3164 if (maybe_ge (pending_stack_adjust, 32)
3165 || (maybe_ne (pending_stack_adjust, 0)
3166 && (flags & ECF_MAY_BE_ALLOCA))
3167 || (maybe_ne (pending_stack_adjust, 0)
3168 && flag_exceptions && !(flags & ECF_NOTHROW))
3169 || pass == 0)
3170 do_pending_stack_adjust ();
3171
3172 /* Precompute any arguments as needed. */
3173 if (pass)
3174 precompute_arguments (num_actuals, args);
3175
3176 /* Now we are about to start emitting insns that can be deleted
3177 if a libcall is deleted. */
3178 if (pass && (flags & ECF_MALLOC))
3179 start_sequence ();
3180
3181 /* Check the canary value for a sibcall, or for a function which
3182 doesn't return and could throw. */
3183 if ((pass == 0
3184 || ((flags & ECF_NORETURN) != 0 && tree_could_throw_p (exp)))
3185 && crtl->stack_protect_guard
3186 && targetm.stack_protect_runtime_enabled_p ())
3187 stack_protect_epilogue ();
3188
3189 adjusted_args_size = args_size;
3190 /* Compute the actual size of the argument block required. The variable
3191 and constant sizes must be combined, the size may have to be rounded,
3192 and there may be a minimum required size. When generating a sibcall
3193 pattern, do not round up, since we'll be re-using whatever space our
3194 caller provided. */
3195 unadjusted_args_size
3196 = compute_argument_block_size (reg_parm_stack_space,
3197 &adjusted_args_size,
3198 fndecl, fntype,
3199 (pass == 0 ? 0
3200 : preferred_stack_boundary));
3201
3202 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3203
3204 /* The argument block when performing a sibling call is the
3205 incoming argument block. */
3206 if (pass == 0)
3207 {
3208 argblock = crtl->args.internal_arg_pointer;
3209 if (STACK_GROWS_DOWNWARD)
3210 argblock
3211 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3212 else
3213 argblock
3214 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
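 /* E.g. (hypothetical values): with STACK_GROWS_DOWNWARD and
 crtl->args.pretend_args_size == 16, ARGBLOCK becomes
 internal_arg_pointer + 16, so offset 0 in the maps below corresponds
 to the first stack argument beyond the pretend args. */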
3215
3216 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
3217 stored_args_map = sbitmap_alloc (map_size);
3218 bitmap_clear (stored_args_map);
3219 stored_args_watermark = HOST_WIDE_INT_M1U;
3220 }
3221
3222 /* If we have no actual push instructions, or shouldn't use them,
3223 make space for all args right now. */
3224 else if (adjusted_args_size.var != 0)
3225 {
3226 if (old_stack_level == 0)
3227 {
3228 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3229 old_stack_pointer_delta = stack_pointer_delta;
3230 old_pending_adj = pending_stack_adjust;
3231 pending_stack_adjust = 0;
3232 /* stack_arg_under_construction says whether a stack arg is
3233 being constructed at the old stack level. Pushing the stack
3234 gets a clean outgoing argument block. */
3235 old_stack_arg_under_construction = stack_arg_under_construction;
3236 stack_arg_under_construction = 0;
3237 }
3238 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
3239 if (flag_stack_usage_info)
3240 current_function_has_unbounded_dynamic_stack_size = 1;
3241 }
3242 else
3243 {
3244 /* Note that we must go through the motions of allocating an argument
3245 block even if the size is zero because we may be storing args
3246 in the area reserved for register arguments, which may be part of
3247 the stack frame. */
3248
3249 poly_int64 needed = adjusted_args_size.constant;
3250
3251 /* Store the maximum argument space used. It will be pushed by
3252 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3253 checking). */
3254
3255 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
3256 needed);
3257
3258 if (must_preallocate)
3259 {
3260 if (ACCUMULATE_OUTGOING_ARGS)
3261 {
3262 /* Since the stack pointer will never be pushed, it is
3263 possible for the evaluation of a parm to clobber
3264 something we have already written to the stack.
3265 Since most function calls on RISC machines do not use
3266 the stack, this is uncommon, but must work correctly.
3267
3268 Therefore, we save any area of the stack that was already
3269 written and that we are using. Here we set up to do this
3270 by making a new stack usage map from the old one. The
3271 actual save will be done by store_one_arg.
3272
3273 Another approach might be to try to reorder the argument
3274 evaluations to avoid this conflicting stack usage. */
3275
3276 /* Since we will be writing into the entire argument area,
3277 the map must be allocated for its entire size, not just
3278 the part that is the responsibility of the caller. */
3279 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3280 needed += reg_parm_stack_space;
3281
3282 poly_int64 limit = needed;
3283 if (ARGS_GROW_DOWNWARD)
3284 limit += 1;
3285
3286 /* For polynomial sizes, this is the maximum possible
3287 size needed for arguments with a constant size
3288 and offset. */
3289 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3290 highest_outgoing_arg_in_use
3291 = MAX (initial_highest_arg_in_use, const_limit);
3292
3293 free (stack_usage_map_buf);
3294 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3295 stack_usage_map = stack_usage_map_buf;
3296
3297 if (initial_highest_arg_in_use)
3298 memcpy (stack_usage_map, initial_stack_usage_map,
3299 initial_highest_arg_in_use);
3300
3301 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3302 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3303 (highest_outgoing_arg_in_use
3304 - initial_highest_arg_in_use));
3305 needed = 0;
3306
3307 /* The address of the outgoing argument list must not be
3308 copied to a register here, because argblock would be left
3309 pointing to the wrong place after the call to
3310 allocate_dynamic_stack_space below. */
3311
3312 argblock = virtual_outgoing_args_rtx;
3313 }
3314 else
3315 {
3316 /* Try to reuse some or all of the pending_stack_adjust
3317 to get this space. */
3318 if (inhibit_defer_pop == 0
3319 && (combine_pending_stack_adjustment_and_call
3320 (&needed,
3321 unadjusted_args_size,
3322 &adjusted_args_size,
3323 preferred_unit_stack_boundary)))
3324 {
3325 /* combine_pending_stack_adjustment_and_call computes
3326 an adjustment before the arguments are allocated.
3327 Account for them and see whether or not the stack
3328 needs to go up or down. */
3329 needed = unadjusted_args_size - needed;
3330
3331 /* Checked by
3332 combine_pending_stack_adjustment_and_call. */
3333 gcc_checking_assert (ordered_p (needed, 0));
3334 if (maybe_lt (needed, 0))
3335 {
3336 /* We're releasing stack space. */
3337 /* ??? We can avoid any adjustment at all if we're
3338 already aligned. FIXME. */
3339 pending_stack_adjust = -needed;
3340 do_pending_stack_adjust ();
3341 needed = 0;
3342 }
3343 else
3344 /* We need to allocate space. We'll do that in
3345 push_block below. */
3346 pending_stack_adjust = 0;
3347 }
3348
3349 /* Special case this because overhead of `push_block' in
3350 this case is non-trivial. */
3351 if (known_eq (needed, 0))
3352 argblock = virtual_outgoing_args_rtx;
3353 else
3354 {
3355 rtx needed_rtx = gen_int_mode (needed, Pmode);
3356 argblock = push_block (needed_rtx, 0, 0);
3357 if (ARGS_GROW_DOWNWARD)
3358 argblock = plus_constant (Pmode, argblock, needed);
3359 }
3360
3361 /* We only really need to call `copy_to_reg' in the case
3362 where push insns are going to be used to pass ARGBLOCK
3363 to a function call in ARGS. In that case, the stack
3364 pointer changes value from the allocation point to the
3365 call point, and hence the value of
3366 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3367 as well always do it. */
3368 argblock = copy_to_reg (argblock);
3369 }
3370 }
3371 }
3372
3373 if (ACCUMULATE_OUTGOING_ARGS)
3374 {
3375 /* The save/restore code in store_one_arg handles all
3376 cases except one: a constructor call (including a C
3377 function returning a BLKmode struct) to initialize
3378 an argument. */
3379 if (stack_arg_under_construction)
3380 {
3381 rtx push_size
3382 = (gen_int_mode
3383 (adjusted_args_size.constant
3384 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3385 : TREE_TYPE (fndecl))
3386 ? 0 : reg_parm_stack_space), Pmode));
3387 if (old_stack_level == 0)
3388 {
3389 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3390 old_stack_pointer_delta = stack_pointer_delta;
3391 old_pending_adj = pending_stack_adjust;
3392 pending_stack_adjust = 0;
3393 /* stack_arg_under_construction says whether a stack
3394 arg is being constructed at the old stack level.
3395 Pushing the stack gets a clean outgoing argument
3396 block. */
3397 old_stack_arg_under_construction
3398 = stack_arg_under_construction;
3399 stack_arg_under_construction = 0;
3400 /* Make a new map for the new argument list. */
3401 free (ptr: stack_usage_map_buf);
3402 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3403 stack_usage_map = stack_usage_map_buf;
3404 highest_outgoing_arg_in_use = 0;
3405 stack_usage_watermark = HOST_WIDE_INT_M1U;
3406 }
3407 /* We can pass TRUE as the 4th argument because we just
3408 saved the stack pointer and will restore it right after
3409 the call. */
3410 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
3411 -1, true);
3412 }
3413
3414 /* If argument evaluation might modify the stack pointer,
3415 copy the address of the argument list to a register. */
3416 for (i = 0; i < num_actuals; i++)
3417 if (args[i].pass_on_stack)
3418 {
3419 argblock = copy_addr_to_reg (argblock);
3420 break;
3421 }
3422 }
3423
3424 compute_argument_addresses (args, argblock, num_actuals);
3425
3426 /* Stack is properly aligned, pops can't safely be deferred during
3427 the evaluation of the arguments. */
3428 NO_DEFER_POP;
3429
3430 /* Precompute all register parameters. It isn't safe to compute
3431 anything once we have started filling any specific hard regs.
3432 TLS symbols sometimes need a call to resolve. Precompute
3433 register parameters before any stack pointer manipulation
3434 to avoid unaligned stack in the called function. */
3435 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3436
3437 OK_DEFER_POP;
3438
3439 /* Perform stack alignment before the first push (the last arg). */
3440 if (argblock == 0
3441 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
3442 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
3443 {
3444 /* When the stack adjustment is pending, we get better code
3445 by combining the adjustments. */
3446 if (maybe_ne (pending_stack_adjust, 0)
3447 && ! inhibit_defer_pop
3448 && (combine_pending_stack_adjustment_and_call
3449 (&pending_stack_adjust,
3450 unadjusted_args_size,
3451 &adjusted_args_size,
3452 preferred_unit_stack_boundary)))
3453 do_pending_stack_adjust ();
3454 else if (argblock == 0)
3455 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
3456 - unadjusted_args_size,
3457 Pmode));
3458 }
3459 /* Now that the stack is properly aligned, pops can't safely
3460 be deferred during the evaluation of the arguments. */
3461 NO_DEFER_POP;
3462
3463 /* Record the maximum pushed stack space size. We need to delay
3464 doing it this far to take into account the optimization done
3465 by combine_pending_stack_adjustment_and_call. */
3466 if (flag_stack_usage_info
3467 && !ACCUMULATE_OUTGOING_ARGS
3468 && pass
3469 && adjusted_args_size.var == 0)
3470 {
3471 poly_int64 pushed = (adjusted_args_size.constant
3472 + pending_stack_adjust);
3473 current_function_pushed_stack_size
3474 = upper_bound (current_function_pushed_stack_size, pushed);
3475 }
3476
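/* Get an rtx for the address of the function being called. */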
3477 funexp = rtx_for_function_call (fndecl, addr);
3478
3479 if (CALL_EXPR_STATIC_CHAIN (exp))
3480 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
3481 else
3482 static_chain_value = 0;
3483
3484#ifdef REG_PARM_STACK_SPACE
3485 /* Save the fixed argument area if it's part of the caller's frame and
3486 is clobbered by argument setup for this call. */
3487 if (ACCUMULATE_OUTGOING_ARGS && pass)
3488 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3489 &low_to_save, &high_to_save);
3490#endif
3491
3492 /* Now store (and compute if necessary) all non-register parms.
3493 These come before register parms, since they can require block-moves,
3494 which could clobber the registers used for register parms.
3495 Parms which have partial registers are not stored here,
3496 but we do preallocate space here if they want that. */
3497
3498 for (i = 0; i < num_actuals; i++)
3499 {
3500 if (args[i].reg == 0 || args[i].pass_on_stack)
3501 {
3502 rtx_insn *before_arg = get_last_insn ();
3503
3504 /* We don't allow passing huge (> 2^30 B) arguments
3505 by value. It would cause an overflow later on. */
3506 if (constant_lower_bound (adjusted_args_size.constant)
3507 >= (1 << (HOST_BITS_PER_INT - 2)))
3508 {
3509 sorry ("passing too large argument on stack");
3510 /* Don't worry about stack clean-up. */
3511 if (pass == 0)
3512 sibcall_failure = true;
3513 else
3514 normal_failure = true;
3515 continue;
3516 }
3517
3518 if (store_one_arg (&args[i], argblock, flags,
3519 adjusted_args_size.var != 0,
3520 reg_parm_stack_space)
3521 || (pass == 0
3522 && check_sibcall_argument_overlap (before_arg,
3523 &args[i], true)))
3524 sibcall_failure = true;
3525 }
3526
3527 if (args[i].stack)
3528 call_fusage
3529 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3530 gen_rtx_USE (VOIDmode, args[i].stack),
3531 call_fusage);
3532 }
3533
3534 /* If we have a parm that is passed in registers but not in memory
3535 and whose alignment does not permit a direct copy into registers,
3536 make a group of pseudos that correspond to each register that we
3537 will later fill. */
3538 if (STRICT_ALIGNMENT)
3539 store_unaligned_arguments_into_pseudos (args, num_actuals);
3540
3541 /* Now store any partially-in-registers parm.
3542 This is the last place a block-move can happen. */
3543 if (reg_parm_seen)
3544 for (i = 0; i < num_actuals; i++)
3545 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3546 {
3547 rtx_insn *before_arg = get_last_insn ();
3548
3549 /* On targets with weird calling conventions (e.g. PA) it's
3550 hard to ensure that all cases of argument overlap between
3551 stack and registers work. Play it safe and bail out. */
3552 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
3553 {
3554 sibcall_failure = true;
3555 break;
3556 }
3557
3558 if (store_one_arg (&args[i], argblock, flags,
3559 adjusted_args_size.var != 0,
3560 reg_parm_stack_space)
3561 || (pass == 0
3562 && check_sibcall_argument_overlap (before_arg,
3563 &args[i], true)))
3564 sibcall_failure = true;
3565 }
3566
3567 /* Set up the next argument register. For sibling calls on machines
3568 with register windows this should be the incoming register. */
3569 if (pass == 0)
3570 next_arg_reg = targetm.calls.function_incoming_arg
3571 (args_so_far, function_arg_info::end_marker ());
3572 else
3573 next_arg_reg = targetm.calls.function_arg
3574 (args_so_far, function_arg_info::end_marker ());
3575
3576 targetm.calls.start_call_args (args_so_far);
3577
3578 bool any_regs = false;
3579 for (i = 0; i < num_actuals; i++)
3580 if (args[i].reg != NULL_RTX)
3581 {
3582 any_regs = true;
3583 targetm.calls.call_args (args_so_far, args[i].reg, funtype);
3584 }
3585 if (!any_regs)
3586 targetm.calls.call_args (args_so_far, pc_rtx, funtype);
3587
3588 /* Figure out the register where the value, if any, will come back. */
3589 valreg = 0;
3590 if (TYPE_MODE (rettype) != VOIDmode
3591 && ! structure_value_addr)
3592 {
3593 if (pcc_struct_value)
3594 valreg = hard_function_value (build_pointer_type (rettype),
3595 fndecl, NULL, (pass == 0));
3596 else
3597 valreg = hard_function_value (rettype, fndecl, fntype,
3598 (pass == 0));
3599
3600 /* If VALREG is a PARALLEL whose first member has a zero
3601 offset, use that. This is for targets such as m68k that
3602 return the same value in multiple places. */
3603 if (GET_CODE (valreg) == PARALLEL)
3604 {
3605 rtx elem = XVECEXP (valreg, 0, 0);
3606 rtx where = XEXP (elem, 0);
3607 rtx offset = XEXP (elem, 1);
3608 if (offset == const0_rtx
3609 && GET_MODE (where) == GET_MODE (valreg))
3610 valreg = where;
3611 }
3612 }
3613
3614 /* If register arguments require space on the stack and stack space
3615 was not preallocated, allocate stack space here for arguments
3616 passed in registers. */
3617 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3618 && !ACCUMULATE_OUTGOING_ARGS
3619 && !must_preallocate && reg_parm_stack_space > 0)
3620 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3621
3622 /* Pass the function the address in which to return a
3623 structure value. */
3624 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3625 {
3626 structure_value_addr
3627 = convert_memory_address (Pmode, structure_value_addr);
3628 emit_move_insn (struct_value,
3629 force_reg (Pmode,
3630 force_operand (structure_value_addr,
3631 NULL_RTX)));
3632
3633 if (REG_P (struct_value))
3634 use_reg (&call_fusage, struct_value);
3635 }
3636
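/* Record the last insn emitted so far; everything emitted from here
   to the call itself sets up the call address and register arguments,
   and is checked for overlap with the argument area when pass == 0. */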
3637 after_args = get_last_insn ();
3638 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
3639 static_chain_value, &call_fusage,
3640 reg_parm_seen, flags);
3641
3642 load_register_parameters (args, num_actuals, &call_fusage, flags,
3643 pass == 0, &sibcall_failure);
3644
3645 /* Save a pointer to the last insn before the call, so that we can
3646 later safely search backwards to find the CALL_INSN. */
3647 before_call = get_last_insn ();
3648
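/* If the callee is known to return one of its arguments unchanged
   (ERF_RETURNS_ARG), record the copy into VALREG in CALL_FUSAGE so
   later passes can see the equivalence. */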
3649 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
3650 {
3651 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
3652 arg_nr = num_actuals - arg_nr - 1;
3653 if (arg_nr >= 0
3654 && arg_nr < num_actuals
3655 && args[arg_nr].reg
3656 && valreg
3657 && REG_P (valreg)
3658 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
3659 call_fusage
3660 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
3661 gen_rtx_SET (valreg, args[arg_nr].reg),
3662 call_fusage);
3663 }
3664 /* All arguments and registers used for the call must be set up by
3665 now! */
3666
3667 /* Stack must be properly aligned now. */
3668 gcc_assert (!pass
3669 || multiple_p (stack_pointer_delta,
3670 preferred_unit_stack_boundary));
3671
3672 /* Generate the actual call instruction. */
3673 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
3674 adjusted_args_size.constant, struct_value_size,
3675 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3676 flags, args_so_far);
3677
3678 if (flag_ipa_ra)
3679 {
3680 rtx_call_insn *last;
3681 rtx datum = NULL_RTX;
3682 if (fndecl != NULL_TREE)
3683 {
3684 datum = XEXP (DECL_RTL (fndecl), 0);
3685 gcc_assert (datum != NULL_RTX
3686 && GET_CODE (datum) == SYMBOL_REF);
3687 }
3688 last = last_call_insn ();
3689 add_reg_note (last, REG_CALL_DECL, datum);
3690 }
3691
3692 /* If the call setup or the call itself overlaps with anything
3693 of the argument setup we probably clobbered our call address.
3694 In that case we can't do sibcalls. */
3695 if (pass == 0
3696 && check_sibcall_argument_overlap (insn: after_args, arg: 0, mark_stored_args_map: false))
3697 sibcall_failure = true;
3698
3699 /* If a non-BLKmode value is returned at the most significant end
3700 of a register, shift the register right by the appropriate amount
3701 and update VALREG accordingly. BLKmode values are handled by the
3702 group load/store machinery below. */
3703 if (!structure_value_addr
3704 && !pcc_struct_value
3705 && TYPE_MODE (rettype) != VOIDmode
3706 && TYPE_MODE (rettype) != BLKmode
3707 && REG_P (valreg)
3708 && targetm.calls.return_in_msb (rettype))
3709 {
3710 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
3711 sibcall_failure = true;
3712 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
3713 }
3714
3715 if (pass && (flags & ECF_MALLOC))
3716 {
3717 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3718 rtx_insn *last, *insns;
3719
3720 /* The return value from a malloc-like function is a pointer. */
3721 if (TREE_CODE (rettype) == POINTER_TYPE)
3722 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
3723
3724 emit_move_insn (temp, valreg);
3725
3726 /* The return value from a malloc-like function cannot alias
3727 anything else. */
3728 last = get_last_insn ();
3729 add_reg_note (last, REG_NOALIAS, temp);
3730
3731 /* Write out the sequence. */
3732 insns = get_insns ();
3733 end_sequence ();
3734 emit_insn (insns);
3735 valreg = temp;
3736 }
3737
3738 /* For calls to `setjmp', etc., inform
3739 function.cc:setjmp_warnings that it should complain if
3740 nonvolatile values are live. For functions that cannot
3741 return, inform flow that control does not fall through. */
3742
3743 if ((flags & ECF_NORETURN) || pass == 0)
3744 {
3745 /* The barrier must be emitted
3746 immediately after the CALL_INSN. Some ports emit more
3747 than just a CALL_INSN above, so we must search for it here. */
3748
3749 rtx_insn *last = get_last_insn ();
3750 while (!CALL_P (last))
3751 {
3752 last = PREV_INSN (last);
3753 /* There was no CALL_INSN? */
3754 gcc_assert (last != before_call);
3755 }
3756
3757 emit_barrier_after (last);
3758
3759 /* Stack adjustments after a noreturn call are dead code.
3760 However when NO_DEFER_POP is in effect, we must preserve
3761 stack_pointer_delta. */
3762 if (inhibit_defer_pop == 0)
3763 {
3764 stack_pointer_delta = old_stack_allocated;
3765 pending_stack_adjust = 0;
3766 }
3767 }
3768
3769 /* If value type not void, return an rtx for the value. */
3770
3771 if (TYPE_MODE (rettype) == VOIDmode
3772 || ignore)
3773 target = const0_rtx;
3774 else if (structure_value_addr)
3775 {
3776 if (target == 0 || !MEM_P (target))
3777 {
3778 target
3779 = gen_rtx_MEM (TYPE_MODE (rettype),
3780 memory_address (TYPE_MODE (rettype),
3781 structure_value_addr));
3782 set_mem_attributes (target, rettype, 1);
3783 }
3784 }
3785 else if (pcc_struct_value)
3786 {
3787 /* This is the special C++ case where we need to
3788 know what the true target was. We take care to
3789 never use this value more than once in one expression. */
3790 target = gen_rtx_MEM (TYPE_MODE (rettype),
3791 copy_to_reg (valreg));
3792 set_mem_attributes (target, rettype, 1);
3793 }
3794 /* Handle calls that return values in multiple non-contiguous locations.
3795 The Irix 6 ABI has examples of this. */
3796 else if (GET_CODE (valreg) == PARALLEL)
3797 {
3798 if (target == 0)
3799 target = emit_group_move_into_temps (valreg);
3800 else if (rtx_equal_p (target, valreg))
3801 ;
3802 else if (GET_CODE (target) == PARALLEL)
3803 /* Handle the result of an emit_group_move_into_temps
3804 call in the previous pass. */
3805 emit_group_move (target, valreg);
3806 else
3807 emit_group_store (target, valreg, rettype,
3808 int_size_in_bytes (rettype));
3809 }
3810 else if (target
3811 && GET_MODE (target) == TYPE_MODE (rettype)
3812 && GET_MODE (target) == GET_MODE (valreg))
3813 {
3814 bool may_overlap = false;
3815
3816 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3817 reg to a plain register. */
3818 if (!REG_P (target) || HARD_REGISTER_P (target))
3819 valreg = avoid_likely_spilled_reg (valreg);
3820
3821 /* If TARGET is a MEM in the argument area, and we have
3822 saved part of the argument area, then we can't store
3823 directly into TARGET as it may get overwritten when we
3824 restore the argument save area below. Don't work too
3825 hard though and simply force TARGET to a register if it
3826 is a MEM; the optimizer is quite likely to sort it out. */
3827 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3828 for (i = 0; i < num_actuals; i++)
3829 if (args[i].save_area)
3830 {
3831 may_overlap = true;
3832 break;
3833 }
3834
3835 if (may_overlap)
3836 target = copy_to_reg (valreg);
3837 else
3838 {
3839 /* TARGET and VALREG cannot be equal at this point
3840 because the latter would not have
3841 REG_FUNCTION_VALUE_P true, while the former would if
3842 it were referring to the same register.
3843
3844 If they refer to the same register, this move will be
3845 a no-op, except when function inlining is being
3846 done. */
3847 emit_move_insn (target, valreg);
3848
3849 /* If we are setting a MEM, this code must be executed.
3850 Since it is emitted after the call insn, sibcall
3851 optimization cannot be performed in that case. */
3852 if (MEM_P (target))
3853 sibcall_failure = true;
3854 }
3855 }
3856 else
3857 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3858
3859 /* If we promoted this return value, make the proper SUBREG.
3860 TARGET might be const0_rtx here, so be careful. */
3861 if (REG_P (target)
3862 && TYPE_MODE (rettype) != BLKmode
3863 && GET_MODE (target) != TYPE_MODE (rettype))
3864 {
3865 tree type = rettype;
3866 int unsignedp = TYPE_UNSIGNED (type);
3867 machine_mode ret_mode = TYPE_MODE (type);
3868 machine_mode pmode;
3869
3870 /* Ensure we promote as expected, and get the new unsignedness. */
3871 pmode = promote_function_mode (type, ret_mode, &unsignedp,
3872 funtype, 1);
3873 gcc_assert (GET_MODE (target) == pmode);
3874
3875 if (SCALAR_INT_MODE_P (pmode)
3876 && SCALAR_FLOAT_MODE_P (ret_mode)
3877 && known_gt (GET_MODE_SIZE (pmode), GET_MODE_SIZE (ret_mode)))
3878 target = convert_wider_int_to_float (ret_mode, pmode, target);
3879 else
3880 {
3881 target = gen_lowpart_SUBREG (ret_mode, target);
3882 SUBREG_PROMOTED_VAR_P (target) = 1;
3883 SUBREG_PROMOTED_SET (target, unsignedp);
3884 }
3885 }
3886
3887 /* If size of args is variable or this was a constructor call for a stack
3888 argument, restore saved stack-pointer value. */
3889
3890 if (old_stack_level)
3891 {
3892 rtx_insn *prev = get_last_insn ();
3893
3894 emit_stack_restore (SAVE_BLOCK, old_stack_level);
3895 stack_pointer_delta = old_stack_pointer_delta;
3896
3897 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
3898
3899 pending_stack_adjust = old_pending_adj;
3900 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3901 stack_arg_under_construction = old_stack_arg_under_construction;
3902 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3903 stack_usage_map = initial_stack_usage_map;
3904 stack_usage_watermark = initial_stack_usage_watermark;
3905 sibcall_failure = true;
3906 }
3907 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3908 {
3909#ifdef REG_PARM_STACK_SPACE
3910 if (save_area)
3911 restore_fixed_argument_area (save_area, argblock,
3912 high_to_save, low_to_save);
3913#endif
3914
3915 /* If we saved any argument areas, restore them. */
3916 for (i = 0; i < num_actuals; i++)
3917 if (args[i].save_area)
3918 {
3919 machine_mode save_mode = GET_MODE (args[i].save_area);
3920 rtx stack_area
3921 = gen_rtx_MEM (save_mode,
3922 memory_address (save_mode,
3923 XEXP (args[i].stack_slot, 0)));
3924
3925 if (save_mode != BLKmode)
3926 emit_move_insn (stack_area, args[i].save_area);
3927 else
3928 emit_block_move (stack_area, args[i].save_area,
3929 (gen_int_mode
3930 (args[i].locate.size.constant, Pmode)),
3931 BLOCK_OP_CALL_PARM);
3932 }
3933
3934 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3935 stack_usage_map = initial_stack_usage_map;
3936 stack_usage_watermark = initial_stack_usage_watermark;
3937 }
3938
3939 /* If this was alloca, record the new stack level. */
3940 if (flags & ECF_MAY_BE_ALLOCA)
3941 record_new_stack_level ();
3942
3943 /* Free up storage we no longer need. */
3944 for (i = 0; i < num_actuals; ++i)
3945 free (args[i].aligned_regs);
3946
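/* Tell the target that generation of the call sequence is complete;
   this pairs with the start_call_args hook invoked above. */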
3947 targetm.calls.end_call_args (args_so_far);
3948
3949 insns = get_insns ();
3950 end_sequence ();
3951
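/* Pass 0 built a candidate sibcall sequence: save it and reset the
   per-argument state so that pass 1 can rebuild the call as a normal
   one. Pass 1 built the normal call sequence. */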
3952 if (pass == 0)
3953 {
3954 tail_call_insns = insns;
3955
3956 /* Restore the pending stack adjustment now that we have
3957 finished generating the sibling call sequence. */
3958
3959 restore_pending_stack_adjust (&save);
3960
3961 /* Prepare arg structure for next iteration. */
3962 for (i = 0; i < num_actuals; i++)
3963 {
3964 args[i].value = 0;
3965 args[i].aligned_regs = 0;
3966 args[i].stack = 0;
3967 }
3968
3969 sbitmap_free (stored_args_map);
3970 internal_arg_pointer_exp_state.scan_start = NULL;
3971 internal_arg_pointer_exp_state.cache.release ();
3972 }
3973 else
3974 {
3975 normal_call_insns = insns;
3976
3977 /* Verify that we've deallocated all the stack we used. */
3978 gcc_assert ((flags & ECF_NORETURN)
3979 || normal_failure
3980 || known_eq (old_stack_allocated,
3981 stack_pointer_delta
3982 - pending_stack_adjust));
3983 if (normal_failure)
3984 normal_call_insns = NULL;
3985 }
3986
3987 /* If something prevents making this a sibling call,
3988 zero out the sequence. */
3989 if (sibcall_failure)
3990 tail_call_insns = NULL;
3991 else
3992 break;
3993 }
3994
3995 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3996 arguments too, as argument area is now clobbered by the call. */
3997 if (tail_call_insns)
3998 {
3999 emit_insn (tail_call_insns);
4000 crtl->tail_call_emit = true;
4001 }
4002 else
4003 {
4004 emit_insn (normal_call_insns);
4005 if (try_tail_call)
4006 /* Ideally we'd emit a message for all of the ways that it could
4007 have failed. */
4008 maybe_complain_about_tail_call (exp, "tail call production failed");
4009 }
4010
4011 currently_expanding_call--;
4012
4013 free (stack_usage_map_buf);
4014 free (args);
4015 return target;
4016}
4017
4018/* A sibling call sequence invalidates any REG_EQUIV notes made for
4019 this function's incoming arguments.
4020
4021 At the start of RTL generation we know the only REG_EQUIV notes
4022 in the rtl chain are those for incoming arguments, so we can look
4023 for REG_EQUIV notes between the start of the function and the
4024 NOTE_INSN_FUNCTION_BEG.
4025
4026 This is (slight) overkill. We could keep track of the highest
4027 argument we clobber and be more selective in removing notes, but it
4028 does not seem to be worth the effort. */
4029
4030void
4031fixup_tail_calls (void)
4032{
4033 rtx_insn *insn;
4034
4035 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4036 {
4037 rtx note;
4038
4039 /* There are never REG_EQUIV notes for the incoming arguments
4040 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4041 if (NOTE_P (insn)
4042 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
4043 break;
4044
4045 note = find_reg_note (insn, REG_EQUIV, 0);
4046 if (note)
4047 remove_note (insn, note);
4048 note = find_reg_note (insn, REG_EQUIV, 0);
4049 gcc_assert (!note);
4050 }
4051}
4052
4053/* Traverse a list of TYPES and expand all complex types into their
4054 components. */
4055static tree
4056split_complex_types (tree types)
4057{
4058 tree p;
4059
4060 /* Before allocating memory, check for the common case of no complex. */
4061 for (p = types; p; p = TREE_CHAIN (p))
4062 {
4063 tree type = TREE_VALUE (p);
4064 if (TREE_CODE (type) == COMPLEX_TYPE
4065 && targetm.calls.split_complex_arg (type))
4066 goto found;
4067 }
4068 return types;
4069
4070 found:
4071 types = copy_list (types);
4072
4073 for (p = types; p; p = TREE_CHAIN (p))
4074 {
4075 tree complex_type = TREE_VALUE (p);
4076
4077 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4078 && targetm.calls.split_complex_arg (complex_type))
4079 {
4080 tree next, imag;
4081
4082 /* Rewrite complex type with component type. */
4083 TREE_VALUE (p) = TREE_TYPE (complex_type);
4084 next = TREE_CHAIN (p);
4085
4086 /* Add another component type for the imaginary part. */
4087 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4088 TREE_CHAIN (p) = imag;
4089 TREE_CHAIN (imag) = next;
4090
4091 /* Skip the newly created node. */
4092 p = TREE_CHAIN (p);
4093 }
4094 }
4095
4096 return types;
4097}
4098
4099/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4100 for a value of mode OUTMODE,
4101 with NARGS different arguments, passed as ARGS.
4102 Store the return value if RETVAL is nonzero: store it in VALUE if
4103 VALUE is nonnull, otherwise pick a convenient location. In either
4104 case return the location of the stored value.
4105
4106 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4107 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4108 other types of library calls. */
4109
4110rtx
4111emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4112 enum libcall_type fn_type,
4113 machine_mode outmode, int nargs, rtx_mode_t *args)
4114{
4115 /* Total size in bytes of all the stack-parms scanned so far. */
4116 struct args_size args_size;
4117 /* Size of arguments before any adjustments (such as rounding). */
4118 struct args_size original_args_size;
4119 int argnum;
4120 rtx fun;
4121 /* TODO: choose the correct decl type of orgfun. Sadly this information
4122 isn't present here, so we default to the native calling ABI. */
4123 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4124 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4125 int count;
4126 rtx argblock = 0;
4127 CUMULATIVE_ARGS args_so_far_v;
4128 cumulative_args_t args_so_far;
4129 struct arg
4130 {
4131 rtx value;
4132 machine_mode mode;
4133 rtx reg;
4134 int partial;
4135 struct locate_and_pad_arg_data locate;
4136 rtx save_area;
4137 };
4138 struct arg *argvec;
4139 int old_inhibit_defer_pop = inhibit_defer_pop;
4140 rtx call_fusage = 0;
4141 rtx mem_value = 0;
4142 rtx valreg;
4143 bool pcc_struct_value = false;
4144 poly_int64 struct_value_size = 0;
4145 int flags;
4146 int reg_parm_stack_space = 0;
4147 poly_int64 needed;
4148 rtx_insn *before_call;
4149 bool have_push_fusage;
4150 tree tfom; /* type_for_mode (outmode, 0) */
4151
4152#ifdef REG_PARM_STACK_SPACE
4153 /* Define the boundary of the register parm stack space that needs to be
4154 saved, if any. */
4155 int low_to_save = 0, high_to_save = 0;
4156 rtx save_area = 0; /* Place that it is saved. */
4157#endif
4158
4159 /* Size of the stack reserved for parameter registers. */
4160 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4161 char *initial_stack_usage_map = stack_usage_map;
4162 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
4163 char *stack_usage_map_buf = NULL;
4164
4165 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4166
4167#ifdef REG_PARM_STACK_SPACE
4168 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
4169#endif
4170
4171 /* By default, library functions cannot throw. */
4172 flags = ECF_NOTHROW;
4173
4174 switch (fn_type)
4175 {
4176 case LCT_NORMAL:
4177 break;
4178 case LCT_CONST:
4179 flags |= ECF_CONST;
4180 break;
4181 case LCT_PURE:
4182 flags |= ECF_PURE;
4183 break;
4184 case LCT_NORETURN:
4185 flags |= ECF_NORETURN;
4186 break;
4187 case LCT_THROW:
4188 flags &= ~ECF_NOTHROW;
4189 break;
4190 case LCT_RETURNS_TWICE:
4191 flags = ECF_RETURNS_TWICE;
4192 break;
4193 }
4194 fun = orgfun;
4195
4196 /* Ensure current function's preferred stack boundary is at least
4197 what we need. */
4198 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4199 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4200
4201 /* If this kind of value comes back in memory,
4202 decide where in memory it should come back. */
4203 if (outmode != VOIDmode)
4204 {
4205 tfom = lang_hooks.types.type_for_mode (outmode, 0);
4206 if (aggregate_value_p (tfom, 0))
4207 {
4208#ifdef PCC_STATIC_STRUCT_RETURN
4209 rtx pointer_reg
4210 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
4211 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4212 pcc_struct_value = true;
4213 if (value == 0)
4214 value = gen_reg_rtx (outmode);
4215#else /* not PCC_STATIC_STRUCT_RETURN */
4216 struct_value_size = GET_MODE_SIZE (outmode);
4217 if (value != 0 && MEM_P (value))
4218 mem_value = value;
4219 else
4220 mem_value = assign_temp (tfom, 1, 1);
4221#endif
4222 /* This call returns a big structure. */
4223 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
4224 }
4225 }
4226 else
4227 tfom = void_type_node;
4228
4229 /* ??? Unfinished: must pass the memory address as an argument. */
4230
4231 /* Copy all the libcall-arguments out of the varargs data
4232 and into a vector ARGVEC.
4233
4234 Compute how to pass each argument. We only support a very small subset
4235 of the full argument passing conventions to limit complexity here since
4236 library functions shouldn't have many args. */
4237
4238 argvec = XALLOCAVEC (struct arg, nargs + 1);
4239 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
4240
4241#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4242 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
4243#else
4244 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
4245#endif
4246 args_so_far = pack_cumulative_args (&args_so_far_v);
4247
4248 args_size.constant = 0;
4249 args_size.var = 0;
4250
4251 count = 0;
4252
4253 push_temp_slots ();
4254
4255 /* If there's a structure value address to be passed,
4256 either pass it in the special place, or pass it as an extra argument. */
4257 if (mem_value && struct_value == 0 && ! pcc_struct_value)
4258 {
4259 rtx addr = XEXP (mem_value, 0);
4260
4261 nargs++;
4262
4263 /* Make sure it is a reasonable operand for a move or push insn. */
4264 if (!REG_P (addr) && !MEM_P (addr)
4265 && !(CONSTANT_P (addr)
4266 && targetm.legitimate_constant_p (Pmode, addr)))
4267 addr = force_operand (addr, NULL_RTX);
4268
4269 argvec[count].value = addr;
4270 argvec[count].mode = Pmode;
4271 argvec[count].partial = 0;
4272
4273 function_arg_info ptr_arg (Pmode, /*named=*/true);
4274 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
4275 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
4276
4277 locate_and_pad_parm (Pmode, NULL_TREE,
4278#ifdef STACK_PARMS_IN_REG_PARM_AREA
4279 1,
4280#else
4281 argvec[count].reg != 0,
4282#endif
4283 reg_parm_stack_space, 0,
4284 NULL_TREE, &args_size, &argvec[count].locate);
4285
4286 if (argvec[count].reg == 0 || argvec[count].partial != 0
4287 || reg_parm_stack_space > 0)
4288 args_size.constant += argvec[count].locate.size.constant;
4289
4290 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
4291
4292 count++;
4293 }
4294
4295 for (unsigned int i = 0; count < nargs; i++, count++)
4296 {
4297 rtx val = args[i].first;
4298 function_arg_info arg (args[i].second, /*named=*/true);
4299 int unsigned_p = 0;
4300
4301 /* We cannot convert the arg value to the mode the library wants here;
4302 must do it earlier where we know the signedness of the arg. */
4303 gcc_assert (arg.mode != BLKmode
4304 && (GET_MODE (val) == arg.mode
4305 || GET_MODE (val) == VOIDmode));
4306
4307 /* Make sure it is a reasonable operand for a move or push insn. */
4308 if (!REG_P (val) && !MEM_P (val)
4309 && !(CONSTANT_P (val)
4310 && targetm.legitimate_constant_p (arg.mode, val)))
4311 val = force_operand (val, NULL_RTX);
4312
4313 if (pass_by_reference (&args_so_far_v, arg))
4314 {
4315 rtx slot;
4316 int must_copy = !reference_callee_copied (&args_so_far_v, arg);
4317
4318 /* If this was a CONST function, it is now PURE since it now
4319 reads memory. */
4320 if (flags & ECF_CONST)
4321 {
4322 flags &= ~ECF_CONST;
4323 flags |= ECF_PURE;
4324 }
4325
4326 if (MEM_P (val) && !must_copy)
4327 {
4328 tree val_expr = MEM_EXPR (val);
4329 if (val_expr)
4330 mark_addressable (val_expr);
4331 slot = val;
4332 }
4333 else
4334 {
4335 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
4336 1, 1);
4337 emit_move_insn (slot, val);
4338 }
4339
4340 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4341 gen_rtx_USE (VOIDmode, slot),
4342 call_fusage);
4343 if (must_copy)
4344 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4345 gen_rtx_CLOBBER (VOIDmode,
4346 slot),
4347 call_fusage);
4348
4349 arg.mode = Pmode;
4350 arg.pass_by_reference = true;
4351 val = force_operand (XEXP (slot, 0), NULL_RTX);
4352 }
4353
4354 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
4355 NULL_TREE, 0);
4356 argvec[count].mode = arg.mode;
4357 argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
4358 unsigned_p);
4359 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
4360
4361 argvec[count].partial
4362 = targetm.calls.arg_partial_bytes (args_so_far, arg);
4363
4364 if (argvec[count].reg == 0
4365 || argvec[count].partial != 0
4366 || reg_parm_stack_space > 0)
4367 {
4368 locate_and_pad_parm (arg.mode, NULL_TREE,
4369#ifdef STACK_PARMS_IN_REG_PARM_AREA
4370 1,
4371#else
4372 argvec[count].reg != 0,
4373#endif
4374 reg_parm_stack_space, argvec[count].partial,
4375 NULL_TREE, &args_size, &argvec[count].locate);
4376 args_size.constant += argvec[count].locate.size.constant;
4377 gcc_assert (!argvec[count].locate.size.var);
4378 }
4379#ifdef BLOCK_REG_PADDING
4380 else
4381 /* The argument is passed entirely in registers. See at which
4382 end it should be padded. */
4383 argvec[count].locate.where_pad =
4384 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
4385 known_le (GET_MODE_SIZE (arg.mode),
4386 UNITS_PER_WORD));
4387#endif
4388
4389 targetm.calls.function_arg_advance (args_so_far, arg);
4390 }
4391
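/* Any argument that lives at least partly on the stack contributes to
   the stack alignment required for this call. */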
4392 for (int i = 0; i < nargs; i++)
4393 if (reg_parm_stack_space > 0
4394 || argvec[i].reg == 0
4395 || argvec[i].partial != 0)
4396 update_stack_alignment_for_call (&argvec[i].locate);
4397
4398 /* If this machine requires an external definition for library
4399 functions, write one out. */
4400 assemble_external_libcall (fun);
4401
4402 original_args_size = args_size;
4403 args_size.constant = (aligned_upper_bound (args_size.constant
4404 + stack_pointer_delta,
4405 STACK_BYTES)
4406 - stack_pointer_delta);
4407
4408 args_size.constant = upper_bound (args_size.constant,
4409 reg_parm_stack_space);
4410
4411 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4412 args_size.constant -= reg_parm_stack_space;
4413
4414 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4415 args_size.constant);
4416
4417 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4418 {
4419 poly_int64 pushed = args_size.constant + pending_stack_adjust;
4420 current_function_pushed_stack_size
4421 = upper_bound (current_function_pushed_stack_size, pushed);
4422 }
4423
4424 if (ACCUMULATE_OUTGOING_ARGS)
4425 {
4426 /* Since the stack pointer will never be pushed, it is possible for
4427 the evaluation of a parm to clobber something we have already
4428 written to the stack. Since most function calls on RISC machines
4429 do not use the stack, this is uncommon, but must work correctly.
4430
4431 Therefore, we save any area of the stack that was already written
4432 and that we are using. Here we set up to do this by making a new
4433 stack usage map from the old one.
4434
4435 Another approach might be to try to reorder the argument
4436 evaluations to avoid this conflicting stack usage. */
4437
4438 needed = args_size.constant;
4439
4440 /* Since we will be writing into the entire argument area, the
4441 map must be allocated for its entire size, not just the part that
4442 is the responsibility of the caller. */
4443 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4444 needed += reg_parm_stack_space;
4445
4446 poly_int64 limit = needed;
4447 if (ARGS_GROW_DOWNWARD)
4448 limit += 1;
4449
4450 /* For polynomial sizes, this is the maximum possible size needed
4451 for arguments with a constant size and offset. */
4452 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4453 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4454 const_limit);
4455
4456 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4457 stack_usage_map = stack_usage_map_buf;
4458
4459 if (initial_highest_arg_in_use)
4460 memcpy (stack_usage_map, initial_stack_usage_map,
4461 initial_highest_arg_in_use);
4462
4463 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4464 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4465 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4466 needed = 0;
4467
4468 /* We must be careful to use virtual regs before they're instantiated,
4469 and real regs afterwards. Loop optimization, for example, can create
4470 new libcalls after we've instantiated the virtual regs, and if we
4471 use virtuals anyway, they won't match the rtl patterns. */
4472
4473 if (virtuals_instantiated)
4474 argblock = plus_constant (Pmode, stack_pointer_rtx,
4475 STACK_POINTER_OFFSET);
4476 else
4477 argblock = virtual_outgoing_args_rtx;
4478 }
4479 else
4480 {
4481 if (!targetm.calls.push_argument (0))
4482 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
4483 }
4484
4485 /* We push args individually in reverse order, perform stack alignment
4486 before the first push (the last arg). */
4487 if (argblock == 0)
4488 anti_adjust_stack (gen_int_mode (args_size.constant
4489 - original_args_size.constant,
4490 Pmode));
4491
4492 argnum = nargs - 1;
4493
4494#ifdef REG_PARM_STACK_SPACE
4495 if (ACCUMULATE_OUTGOING_ARGS)
4496 {
4497 /* The argument list is the property of the called routine and it
4498 may clobber it. If the fixed area has been used for previous
4499 parameters, we must save and restore it. */
4500 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4501 &low_to_save, &high_to_save);
4502 }
4503#endif
4504
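/* With all arguments processed, fetch the target's end-marker value;
   it is passed to emit_call_1 below in place of the next-argument
   register. */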
4505 rtx call_cookie
4506 = targetm.calls.function_arg (args_so_far,
4507 function_arg_info::end_marker ());
4508
4509 /* Push the args that need to be pushed. */
4510
4511 have_push_fusage = false;
4512
4513 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4514 are to be pushed. */
4515 for (count = 0; count < nargs; count++, argnum--)
4516 {
4517 machine_mode mode = argvec[argnum].mode;
4518 rtx val = argvec[argnum].value;
4519 rtx reg = argvec[argnum].reg;
4520 int partial = argvec[argnum].partial;
4521 unsigned int parm_align = argvec[argnum].locate.boundary;
4522 poly_int64 lower_bound = 0, upper_bound = 0;
4523
4524 if (! (reg != 0 && partial == 0))
4525 {
4526 rtx use;
4527
4528 if (ACCUMULATE_OUTGOING_ARGS)
4529 {
4530 /* If this is being stored into a pre-allocated, fixed-size,
4531 stack area, save any previous data at that location. */
4532
4533 if (ARGS_GROW_DOWNWARD)
4534 {
4535 /* stack_slot is negative, but we want to index stack_usage_map
4536 with positive values. */
4537 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4538 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4539 }
4540 else
4541 {
4542 lower_bound = argvec[argnum].locate.slot_offset.constant;
4543 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4544 }
4545
4546 if (stack_region_maybe_used_p (lower_bound, upper_bound,
4547 reg_parm_stack_space))
4548 {
4549 /* We need to make a save area. */
4550 poly_uint64 size
4551 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4552 machine_mode save_mode
4553 = int_mode_for_size (size, 1).else_blk ();
4554 rtx adr
4555 = plus_constant (Pmode, argblock,
4556 argvec[argnum].locate.offset.constant);
4557 rtx stack_area
4558 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4559
4560 if (save_mode == BLKmode)
4561 {
4562 argvec[argnum].save_area
4563 = assign_stack_temp (BLKmode,
4564 argvec[argnum].locate.size.constant
4565 );
4566
4567 emit_block_move (validize_mem
4568 (copy_rtx (argvec[argnum].save_area)),
4569 stack_area,
4570 (gen_int_mode
4571 (argvec[argnum].locate.size.constant,
4572 Pmode)),
4573 BLOCK_OP_CALL_PARM);
4574 }
4575 else
4576 {
4577 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4578
4579 emit_move_insn (argvec[argnum].save_area, stack_area);
4580 }
4581 }
4582 }
4583
4584 emit_push_insn (val, mode, lang_hooks.types.type_for_mode (mode, 0),
4585 NULL_RTX, parm_align, partial, reg, 0, argblock,
4586 (gen_int_mode
4587 (argvec[argnum].locate.offset.constant, Pmode)),
4588 reg_parm_stack_space,
4589 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
4590
4591 /* Now mark the segment we just used. */
4592 if (ACCUMULATE_OUTGOING_ARGS)
4593 mark_stack_region_used (lower_bound, upper_bound);
4594
4595 NO_DEFER_POP;
4596
4597 /* Indicate argument access so that alias.cc knows that these
4598 values are live. */
4599 if (argblock)
4600 use = plus_constant (Pmode, argblock,
4601 argvec[argnum].locate.offset.constant);
4602 else if (have_push_fusage)
4603 continue;
4604 else
4605 {
4606 /* When arguments are pushed, trying to tell alias.cc where
4607 exactly this argument is won't work, because the
4608 auto-increment causes confusion. So we merely indicate
4609 that we access something with a known mode somewhere on
4610 the stack. */
4611 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4612 gen_rtx_SCRATCH (Pmode));
4613 have_push_fusage = true;
4614 }
4615 use = gen_rtx_MEM (argvec[argnum].mode, use);
4616 use = gen_rtx_USE (VOIDmode, use);
4617 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
4618 }
4619 }
4620
4621 argnum = nargs - 1;
4622
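/* Force the callee address into a form that is legitimate for use in
   a call insn. */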
4623 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
4624
4625 targetm.calls.start_call_args (args_so_far);
4626
4627 /* When expanding a normal call, args are stored in push order,
4628 which is the reverse of what we have here. */
4629 bool any_regs = false;
4630 for (int i = nargs; i-- > 0; )
4631 if (argvec[i].reg != NULL_RTX)
4632 {
4633 targetm.calls.call_args (args_so_far, argvec[i].reg, NULL_TREE);
4634 any_regs = true;
4635 }
4636 if (!any_regs)
4637 targetm.calls.call_args (args_so_far, pc_rtx, NULL_TREE);
4638
4639 /* Now load any reg parms into their regs. */
4640
4641 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4642 are to be pushed. */
4643 for (count = 0; count < nargs; count++, argnum--)
4644 {
4645 machine_mode mode = argvec[argnum].mode;
4646 rtx val = argvec[argnum].value;
4647 rtx reg = argvec[argnum].reg;
4648 int partial = argvec[argnum].partial;
4649
4650 /* Handle calls that pass values in multiple non-contiguous
4651 locations. The PA64 has examples of this for library calls. */
4652 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4653 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
4654 else if (reg != 0 && partial == 0)
4655 {
4656 emit_move_insn (reg, val);
4657#ifdef BLOCK_REG_PADDING
4658 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
4659
4660 /* Copied from load_register_parameters. */
4661
4662 /* Handle case where we have a value that needs shifting
4663 up to the msb. eg. a QImode value and we're padding
4664 upward on a BYTES_BIG_ENDIAN machine. */
4665 if (known_lt (size, UNITS_PER_WORD)
4666 && (argvec[argnum].locate.where_pad
4667 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
4668 {
4669 rtx x;
4670 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4671
4672 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4673 report the whole reg as used. Strictly speaking, the
4674 call only uses SIZE bytes at the msb end, but it doesn't
4675 seem worth generating rtl to say that. */
4676 reg = gen_rtx_REG (word_mode, REGNO (reg));
4677 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4678 if (x != reg)
4679 emit_move_insn (reg, x);
4680 }
4681#endif
4682 }
4683
4684 NO_DEFER_POP;
4685 }
4686
4687 /* Any regs containing parms remain in use through the call. */
4688 for (count = 0; count < nargs; count++)
4689 {
4690 rtx reg = argvec[count].reg;
4691 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4692 use_group_regs (&call_fusage, reg);
4693 else if (reg != 0)
4694 {
4695 int partial = argvec[count].partial;
4696 if (partial)
4697 {
4698 int nregs;
4699 gcc_assert (partial % UNITS_PER_WORD == 0);
4700 nregs = partial / UNITS_PER_WORD;
4701 use_regs (&call_fusage, REGNO (reg), nregs);
4702 }
4703 else
4704 use_reg (&call_fusage, reg);
4705 }
4706 }
4707
4708 /* Pass the function the address in which to return a structure value. */
4709 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4710 {
4711 emit_move_insn (struct_value,
4712 force_reg (Pmode,
4713 force_operand (XEXP (mem_value, 0),
4714 NULL_RTX)));
4715 if (REG_P (struct_value))
4716 use_reg (&call_fusage, struct_value);
4717 }
4718
4719 /* Don't allow popping to be deferred, since then
4720 cse'ing of library calls could delete a call and leave the pop. */
4721 NO_DEFER_POP;
4722 valreg = (mem_value == 0 && outmode != VOIDmode
4723 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
4724
4725 /* Stack must be properly aligned now. */
4726 gcc_assert (multiple_p (stack_pointer_delta,
4727 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
4728
4729 before_call = get_last_insn ();
4730
4731 if (flag_callgraph_info)
4732 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
4733
4734 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4735 will set inhibit_defer_pop to that value. */
4736 /* The return type is needed to decide how many bytes the function pops.
4737 Signedness plays no role in that, so for simplicity, we pretend it's
4738 always signed. We also assume that the list of arguments passed has
4739 no impact, so we pretend it is unknown. */
4740
4741 emit_call_1 (fun, NULL,
4742 get_identifier (XSTR (orgfun, 0)),
4743 build_function_type (tfom, NULL_TREE),
4744 original_args_size.constant, args_size.constant,
4745 struct_value_size, call_cookie, valreg,
4746 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
4747
4748 if (flag_ipa_ra)
4749 {
4750 rtx datum = orgfun;
4751 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
4752 rtx_call_insn *last = last_call_insn ();
4753 add_reg_note (last, REG_CALL_DECL, datum);
4754 }
4755
4756 /* Right-shift returned value if necessary. */
4757 if (!pcc_struct_value
4758 && TYPE_MODE (tfom) != BLKmode
4759 && targetm.calls.return_in_msb (tfom))
4760 {
4761 shift_return_value (TYPE_MODE (tfom), false, valreg);
4762 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4763 }
4764
4765 targetm.calls.end_call_args (args_so_far);
4766
4767 /* For calls to `setjmp', etc., inform function.cc:setjmp_warnings
4768 that it should complain if nonvolatile values are live. For
4769 functions that cannot return, inform flow that control does not
4770 fall through. */
4771 if (flags & ECF_NORETURN)
4772 {
4773 /* The barrier note must be emitted
4774 immediately after the CALL_INSN. Some ports emit more than
4775 just a CALL_INSN above, so we must search for it here. */
4776 rtx_insn *last = get_last_insn ();
4777 while (!CALL_P (last))
4778 {
4779 last = PREV_INSN (last);
4780 /* There was no CALL_INSN? */
4781 gcc_assert (last != before_call);
4782 }
4783
4784 emit_barrier_after (last);
4785 }
4786
4787 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
4788 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
4789 if (flags & ECF_NOTHROW)
4790 {
4791 rtx_insn *last = get_last_insn ();
4792 while (!CALL_P (last))
4793 {
4794 last = PREV_INSN (last);
4795 /* There was no CALL_INSN? */
4796 gcc_assert (last != before_call);
4797 }
4798
4799 make_reg_eh_region_note_nothrow_nononlocal (last);
4800 }
4801
4802 /* Now restore inhibit_defer_pop to its actual original value. */
4803 OK_DEFER_POP;
4804
4805 pop_temp_slots ();
4806
4807 /* Copy the value to the right place. */
4808 if (outmode != VOIDmode && retval)
4809 {
4810 if (mem_value)
4811 {
4812 if (value == 0)
4813 value = mem_value;
4814 if (value != mem_value)
4815 emit_move_insn (value, mem_value);
4816 }
4817 else if (GET_CODE (valreg) == PARALLEL)
4818 {
4819 if (value == 0)
4820 value = gen_reg_rtx (outmode);
4821 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4822 }
4823 else
4824 {
4825 /* Convert to the proper mode if a promotion has been active. */
4826 if (GET_MODE (valreg) != outmode)
4827 {
4828 int unsignedp = TYPE_UNSIGNED (tfom);
4829
4830 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4831 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4832 == GET_MODE (valreg));
4833 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4834 }
4835
4836 if (value != 0)
4837 emit_move_insn (value, valreg);
4838 else
4839 value = valreg;
4840 }
4841 }
4842
4843 if (ACCUMULATE_OUTGOING_ARGS)
4844 {
4845#ifdef REG_PARM_STACK_SPACE
4846 if (save_area)
4847 restore_fixed_argument_area (save_area, argblock,
4848 high_to_save, low_to_save);
4849#endif
4850
4851 /* If we saved any argument areas, restore them. */
4852 for (count = 0; count < nargs; count++)
4853 if (argvec[count].save_area)
4854 {
4855 machine_mode save_mode = GET_MODE (argvec[count].save_area);
4856 rtx adr = plus_constant (Pmode, argblock,
4857 argvec[count].locate.offset.constant);
4858 rtx stack_area = gen_rtx_MEM (save_mode,
4859 memory_address (save_mode, adr));
4860
4861 if (save_mode == BLKmode)
4862 emit_block_move (stack_area,
4863 validize_mem
4864 (copy_rtx (argvec[count].save_area)),
4865 (gen_int_mode
4866 (argvec[count].locate.size.constant, Pmode)),
4867 BLOCK_OP_CALL_PARM);
4868 else
4869 emit_move_insn (stack_area, argvec[count].save_area);
4870 }
4871
4872 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4873 stack_usage_map = initial_stack_usage_map;
4874 stack_usage_watermark = initial_stack_usage_watermark;
4875 }
4876
4877 free (stack_usage_map_buf);
4878
4879 return value;
4880
4881}
4882
4883
4884/* Store a single argument for a function call
4885 into the register or memory area where it must be passed.
4886 *ARG describes the argument value and where to pass it.
4887
4888 ARGBLOCK is the address of the stack-block for all the arguments,
4889 or 0 on a machine where arguments are pushed individually.
4890
4891 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4892 so must be careful about how the stack is used.
4893
4894 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4895 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4896 that we need not worry about saving and restoring the stack.
4897
4898 FNDECL is the declaration of the function we are calling.
4899
4900 Return true if this arg should cause sibcall failure,
4901 false otherwise. */
4902
4903static bool
4904store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4905 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4906{
4907 tree pval = arg->tree_value;
4908 rtx reg = 0;
4909 int partial = 0;
4910 poly_int64 used = 0;
4911 poly_int64 lower_bound = 0, upper_bound = 0;
4912 bool sibcall_failure = false;
4913
4914 if (TREE_CODE (pval) == ERROR_MARK)
4915 return true;
4916
4917 /* Push a new temporary level for any temporaries we make for
4918 this argument. */
4919 push_temp_slots ();
4920
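/* For a sibcall the outgoing arguments overlay the caller's own
   incoming argument area, and conflicts there are detected through the
   stored_args_map machinery rather than the save/restore logic below. */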
4921 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4922 {
4923 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4924 save any previous data at that location. */
4925 if (argblock && ! variable_size && arg->stack)
4926 {
4927 if (ARGS_GROW_DOWNWARD)
4928 {
4929 /* stack_slot is negative, but we want to index stack_usage_map
4930 with positive values. */
4931 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4932 {
4933 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
4934 upper_bound = -rtx_to_poly_int64 (offset) + 1;
4935 }
4936 else
4937 upper_bound = 0;
4938
4939 lower_bound = upper_bound - arg->locate.size.constant;
4940 }
4941 else
4942 {
4943 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4944 {
4945 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
4946 lower_bound = rtx_to_poly_int64 (offset);
4947 }
4948 else
4949 lower_bound = 0;
4950
4951 upper_bound = lower_bound + arg->locate.size.constant;
4952 }

	  if (stack_region_maybe_used_p (lower_bound, upper_bound,
					 reg_parm_stack_space))
	    {
	      /* We need to make a save area.  */
	      poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
	      machine_mode save_mode
		= int_mode_for_size (size, 1).else_blk ();
	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
	      rtx stack_area = gen_rtx_MEM (save_mode, adr);

	      if (save_mode == BLKmode)
		{
		  arg->save_area
		    = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (copy_rtx (arg->save_area)),
				   stack_area,
				   (gen_int_mode
				    (arg->locate.size.constant, Pmode)),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  /* An argument passed entirely in a register should never reach
     store_one_arg; assert that it does not.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == false.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == true.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object, or if for any other reason the
	 modes disagree, convert to the right mode here.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
						    arg->locate.size.constant))
    sibcall_failure = true;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
			 ? 0 : GET_MODE_SIZE (arg->mode));

      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
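      /* For example, on a hypothetical target where PUSH_ROUNDING rounds
	 the push amount up to a 2-byte unit, PUSH_ROUNDING (1) == 2, so a
	 1-byte push leaves SIZE == 2 here.  */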
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  != PAD_NONE)
	/* At the moment we don't (need to) support ABIs for which the
	   padding isn't known at compile time.  In principle it should
	   be easy to add though.  */
	used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  == PAD_DOWNWARD)
	{
	  poly_int64 pad = used - size;
	  unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
	  if (pad_align != 0)
	    parm_align = MIN (parm_align, pad_align);
	}
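
      /* Worked example (hypothetical numbers): with SIZE == 2 and
	 USED == 4, PAD == 2; known_alignment (2) is 2 bytes, i.e. 16 bits,
	 so PARM_ALIGN is capped at 16 bits however aligned the slot
	 itself may be.  */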

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      if (maybe_ne (used, 0)
	  && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
			      NULL_RTX, parm_align, partial, reg, used - size,
			      argblock, ARGS_SIZE_RTX (arg->locate.offset),
			      reg_parm_stack_space,
			      ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
	sibcall_failure = true;

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      poly_int64 excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - arg_int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype),
				  EXPAND_NORMAL);
	}

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  == PAD_DOWNWARD)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else
	    {
	      unsigned int excess_align
		= known_alignment (excess) * BITS_PER_UNIT;
	      if (excess_align != 0)
		parm_align = MIN (parm_align, excess_align);
	    }
	}

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  poly_int64 i = 0;

	  if (strip_offset (XEXP (x, 0), &i)
	      == crtl->args.internal_arg_pointer)
	    {
	      /* arg.locate doesn't contain the pretend_args_size offset,
		 it's part of argblock.  Ensure we don't count it in I.  */
	      if (STACK_GROWS_DOWNWARD)
		i -= crtl->args.pretend_args_size;
	      else
		i += crtl->args.pretend_args_size;

	      /* expand_call should ensure this.  */
	      gcc_assert (!arg->locate.offset.var
			  && arg->locate.size.var == 0);
	      poly_int64 size_val = rtx_to_poly_int64 (size_rtx);

	      if (known_eq (arg->locate.offset.constant, i))
		{
		  /* Even though they appear to be at the same location,
		     if part of the outgoing argument is in registers,
		     they aren't really at the same location.  Check for
		     this by making sure that the incoming size is the
		     same as the outgoing size.  */
		  if (maybe_ne (arg->locate.size.constant, size_val))
		    sibcall_failure = true;
		}
	      else if (maybe_in_range_p (arg->locate.offset.constant,
					 i, size_val))
		sibcall_failure = true;
	      /* Use arg->locate.size.constant instead of size_rtx
		 because we only care about the part of the argument
		 on the stack.  */
	      else if (maybe_in_range_p (i, arg->locate.offset.constant,
					 arg->locate.size.constant))
		sibcall_failure = true;
	    }
	}
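
      /* Illustration of the overlap check above (hypothetical offsets):
	 if the incoming argument lives at bytes [0, 8) relative to
	 internal_arg_pointer and the outgoing copy would be stored at
	 [4, 12), the regions overlap without coinciding, so the sibcall
	 is abandoned.  */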

      if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
	emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
			parm_align, partial, reg, excess, argblock,
			ARGS_SIZE_RTX (arg->locate.offset),
			reg_parm_stack_space,
			ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
      /* If we bypass emit_push_insn because this is a zero-sized argument,
	 we might still need to adjust the stack if the argument requires
	 extra alignment.  See PR104558.  */
      else if ((arg->locate.alignment_pad.var
		|| maybe_ne (arg->locate.alignment_pad.constant, 0))
	       && !argblock)
	anti_adjust_stack (ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }

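  /* A PARALLEL in arg->reg means the argument is split across several
     (possibly non-contiguous) registers; schematically, e.g.

       (parallel [(expr_list (reg:DI 0) (const_int 0))
		  (expr_list (reg:DI 1) (const_int 8))])

     for a 16-byte value in two DImode registers (register numbers and
     modes are illustrative only).  */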
  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    mark_stack_region_used (lower_bound, upper_bound);

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  */
  pop_temp_slots ();

  return sibcall_failure;
}

/* Return true if we do not know how to pass ARG solely in registers.  */

bool
must_pass_in_stack_var_size (const function_arg_info &arg)
{
  if (!arg.type)
    return false;

  /* If the type has variable size...  */
  if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (arg.type))
    return true;

  return false;
}
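
/* A target whose ABI passes exactly the variable-sized and addressable
   types on the stack can use this function directly as its hook, e.g.
   (a sketch; whether this matches a given ABI is target-specific):

     #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size  */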

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
{
  if (!arg.type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (arg.type))
    return true;

  if (TYPE_EMPTY_P (arg.type))
    return false;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (arg.mode == BLKmode
      && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (targetm.calls.function_arg_padding (arg.mode, arg.type)
	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;

  return false;
}
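
/* Worked example (hypothetical target): with PARM_BOUNDARY == 32 bits and
   BYTES_BIG_ENDIAN, a 5-byte BLKmode struct padded upward has
   5 % 4 != 0, so copying it into a register would leave the data in the
   wrong part of the register; it must therefore go on the stack.  */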

/* Return true if TYPE must be passed on the stack when passed to
   the "..." arguments of a function.  */

bool
must_pass_va_arg_in_stack (tree type)
{
  function_arg_info arg (type, /*named=*/false);
  return targetm.calls.must_pass_in_stack (arg);
}
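
/* For example, a va_arg expander might use this predicate to decide
   whether a variadic argument must be fetched from the overflow (stack)
   area rather than from saved registers; illustrative pseudo-code only:

     if (must_pass_va_arg_in_stack (type))
       ... read the argument from the stack overflow area ...  */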

/* Return true if FIELD is the C++17 empty base field that should
   be ignored for ABI calling convention decisions in order to
   maintain ABI compatibility between C++14 and earlier, which doesn't
   add this FIELD to classes with empty bases, and C++17 and later
   which does.  */

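/* For example (illustrative C++ source):

     struct Empty {};
     struct S : Empty { int i; };

   C++17 gives S an artificial FIELD_DECL for the Empty base subobject;
   this predicate identifies that field so that ABI decisions can ignore
   it and S is passed exactly as it was under C++14.  */
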
bool
cxx17_empty_base_field_p (const_tree field)
{
  return (DECL_FIELD_ABI_IGNORED (field)
	  && DECL_ARTIFICIAL (field)
	  && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
	  && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
}