/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "builtins.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.cc validates the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */
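
/* As an illustration of category (1), a hypothetical declaration such as

     static int t[16] __attribute__ ((section (".sdata")));

   places T in the small data area on targets that have one, so its
   address can then turn up as the symbol part of an array access.  */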

/* A "template" for a memory address, used to determine whether the
   address is valid for a mode.  */

struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in template where the step should
                                   be filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in template where the offset
                                   should be filled in.  */
};


/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
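
/* For example, an address in the generic address space (AS == 0) with a
   base and an offset but no symbol, index or step gets
   TEMPL_IDX == (1 << 3) | 1 == 9.  */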

/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
              rtx symbol, rtx base, rtx index, rtx step, rtx offset,
              rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
        {
          act_elem = gen_rtx_MULT (address_mode, act_elem, step);

          if (step_p)
            *step_p = &XEXP (act_elem, 1);
        }

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
        *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
        *addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
        {
          act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

          if (offset_p)
            *offset_p = &XEXP (act_elem, 1);

          if (GET_CODE (symbol) == SYMBOL_REF
              || GET_CODE (symbol) == LABEL_REF
              || GET_CODE (symbol) == CONST)
            act_elem = gen_rtx_CONST (address_mode, act_elem);
        }

      if (*addr)
        *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
        *addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
        {
          *addr = gen_rtx_PLUS (address_mode, *addr, offset);
          if (offset_p)
            *offset_p = &XEXP (*addr, 1);
        }
      else
        {
          *addr = offset;
          if (offset_p)
            *offset_p = addr;
        }
    }

  if (!*addr)
    *addr = const0_rtx;
}
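
/* With all components present, the address built above typically has the
   shape

     (plus (plus (mult index step) base)
           (const (plus symbol offset)))

   in ADDRESS_MODE, with *STEP_P and *OFFSET_P pointing at the step and
   offset operands within it.  */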

/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
                  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
        = poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
        = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
        vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1, true);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
        {
          sym = (addr->symbol
                 ? gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
                 : NULL_RTX);
          bse = (addr->base
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
                 : NULL_RTX);
          idx = (addr->index
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
                 : NULL_RTX);

          gen_addr_rtx (pointer_mode, sym, bse, idx,
                        st ? const0_rtx : NULL_RTX,
                        off ? const0_rtx : NULL_RTX,
                        &templ->ref,
                        &templ->step_p,
                        &templ->off_p);
        }

      if (st)
        *templ->step_p = st;
      if (off)
        *templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
         ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  bse = (addr->base
         ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  idx = (addr->index
         ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);

  /* addr->base could be an SSA_NAME that was set to a constant value.  The
     call to expand_expr may expose that constant.  If so, fold the value
     into OFF and clear BSE.  Otherwise we may later try to pull a mode from
     BSE to generate a REG, which won't work with constants because they
     are modeless.  */
  if (bse && GET_CODE (bse) == CONST_INT)
    {
      if (off)
        off = simplify_gen_binary (PLUS, pointer_mode, bse, off);
      else
        off = bse;
      gcc_assert (GET_CODE (off) == CONST_INT);
      bse = NULL_RTX;
    }
  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}

/* Implement addr_for_mem_ref () directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
        act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
                                act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
                                addr_off, act_elem);
      else
        addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
                                fold_convert (TREE_TYPE (addr_off), offset));
      else
        addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
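
/* In other words, for a TARGET_MEM_REF with all operands present this
   computes the tree equivalent of

     base + (index * step + index2 + offset)

   as a pointer value of type TYPE.  */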

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
                 struct mem_address *addr, code_helper ch)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as, ch);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
                    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
           && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
                 base, addr->offset, addr->index, addr->step, index2);
}

/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (VAR_P (obj)
          && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
          && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains the address of an object that is a link time constant,
   move it to PARTS->symbol.  */

void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
          && fixed_address_object_p (TREE_OPERAND (val, 0)))
        break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* Return true if ADDR contains an instance of BASE_HINT; if so, it is
   moved to PARTS->base.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
                   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
        break;
    }

  if (i == addr->n)
    return false;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
        break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
        break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}
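
/* For instance, if the induction variable candidate V appears in ADDR
   with coefficient 4 (say ADDR is B + 4*V + 8), this sets PARTS->index
   to (sizetype) V and PARTS->step to 4, leaving B + 8 in ADDR.  */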

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
}

/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
                                 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1, true);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
        {
          XEXP (scaled, 1) = gen_int_mode (i, address_mode);
          if (memory_address_addr_space_p (mode, addr, as)
              || memory_address_addr_space_p (mode, scaled, as))
            bitmap_set_bit (valid_mult, i + MAX_RATIO);
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  allowed multipliers:");
          for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
            if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
              fprintf (dump_file, " %d", (int) i);
          fprintf (dump_file, "\n");
          fprintf (dump_file, "\n");
        }

      valid_mult_list[data_index] = valid_mult;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
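
/* As an example, on a target whose scaled-index addressing accepts only
   the usual hardware scale factors (such as x86-64), the bitmap computed
   above typically records 1, 2, 4 and 8 as the allowed multipliers for
   integer modes.  */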

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
                              aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
        continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
          || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
        continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
        {
          best_mult_cost = acost;
          best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
        }
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
        op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
        op_code = MINUS_EXPR;
      else
        {
          addr->elts[j] = addr->elts[i];
          j++;
          continue;
        }

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
        mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
        mult_elt = elt;
      else
        mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Store true to VAR_IN_BASE if variant
   part of address is split to PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
               struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way of distinguishing
     between a pointer and its offset, we guess whether the variant
     part is the pointer.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
        part = fold_build2 (MULT_EXPR, sizetype, part,
                            wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
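
/* For instance, the affine form of the address &A[I] + 4 (with &A a
   link-time-constant symbol, I the iv candidate and element size 4) is
   &A + 4*I + 4; it is split into PARTS as symbol = &A,
   index = (sizetype) I, step = 4 and offset = 4, which maps directly
   onto the operands of a TARGET_MEM_REF.  */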

/* Force the components of PARTS into registers.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
                                              is_gimple_mem_ref_addr, NULL_TREE,
                                              true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
                                             true, NULL_TREE,
                                             true, GSI_SAME_STMT);
}

/* Return true if the OFFSET in PARTS is the only thing that is making
   it an invalid address for type TYPE.  */

static bool
mem_ref_valid_without_offset_p (tree type, mem_address parts)
{
  if (!parts.base)
    parts.base = parts.offset;
  parts.offset = NULL_TREE;
  return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
}

/* Fold PARTS->offset into PARTS->base, so that there is no longer
   a separate offset.  Emit any new instructions before GSI.  */

static void
add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
{
  tree tmp = parts->offset;
  if (parts->base)
    {
      tmp = fold_build_pointer_plus (parts->base, tmp);
      tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
                                        NULL_TREE, true, GSI_SAME_STMT);
    }
  parts->base = tmp;
  parts->offset = NULL_TREE;
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
        {
          gcc_assert (useless_type_conversion_p (sizetype,
                                                 TREE_TYPE (parts.base)));

          if (parts.index)
            {
              /* Add the symbol to base, forcing it into a register
                 if necessary.  */
              tmp = fold_build_pointer_plus (tmp, parts.base);
              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                                is_gimple_mem_ref_addr,
                                                NULL_TREE, true,
                                                GSI_SAME_STMT);
            }
          else
            {
              /* Move base to index, then move the symbol to base.  */
              parts.index = parts.base;
            }
          parts.base = tmp;
        }
      else
        parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
        {
          add_offset_to_base (gsi, &parts);
          mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
          gcc_assert (mem_ref);
          return mem_ref;
        }

      parts.index = force_gimple_operand_gsi (gsi,
                                fold_build2 (MULT_EXPR, sizetype,
                                             parts.index, parts.step),
                                true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
        {
          if (parts.base)
            {
              tmp = fold_build_pointer_plus (parts.base, tmp);
              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                                is_gimple_mem_ref_addr,
                                                NULL_TREE, true,
                                                GSI_SAME_STMT);
            }
          parts.base = tmp;
        }
      else
        {
          if (parts.index)
            {
              tmp = fold_build_pointer_plus (parts.index, tmp);
              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                                is_gimple_mem_ref_addr,
                                                NULL_TREE, true,
                                                GSI_SAME_STMT);
            }
          parts.index = tmp;
        }

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
         [base + offset] addressing mode is supported in next step.
         This is necessary for targets that only support [base + offset],
         but not [base + index] addressing mode.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
        {
          tmp = fold_build_pointer_plus (parts.base, tmp);
          tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                            is_gimple_mem_ref_addr,
                                            NULL_TREE, true, GSI_SAME_STMT);
        }
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      add_offset_to_base (gsi, &parts);
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
        {
          gcc_assert (integer_zerop (TMR_BASE (op)));
          addr->base = TMR_INDEX2 (op);
        }
      else
        addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
              || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  tree base = get_base_address (old_ref);
  if (!base)
    return;

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      if ((TREE_CODE (base) == MEM_REF
           || TREE_CODE (base) == TARGET_MEM_REF)
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
          && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
        {
          duplicate_ssa_name_ptr_info
            (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
          reset_flow_sensitive_info (new_ptr_base);
        }
      else if (VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
        {
          struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
          pt_solution_set_var (&pi->pt, base);
        }
    }

  /* We can transfer dependence info.  */
  if (!MR_DEPENDENCE_CLIQUE (new_ref)
      && (TREE_CODE (base) == MEM_REF
          || TREE_CODE (base) == TARGET_MEM_REF)
      && MR_DEPENDENCE_CLIQUE (base))
    {
      MR_DEPENDENCE_CLIQUE (new_ref) = MR_DEPENDENCE_CLIQUE (base);
      MR_DEPENDENCE_BASE (new_ref) = MR_DEPENDENCE_BASE (base);
    }

  /* And alignment info.  Note we cannot transfer misalignment info
     since that sits on the SSA name but this is flow-sensitive info
     which we cannot transfer in this generic routine.  */
  unsigned old_align = get_object_alignment (old_ref);
  unsigned new_align = get_object_alignment (new_ref);
  if (new_align < old_align)
    TREE_TYPE (new_ref) = build_aligned_type (TREE_TYPE (new_ref), old_align);
}

/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
                        (PLUS_EXPR, TREE_TYPE (addr.offset),
                         addr.offset,
                         TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
           && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
                      (get_addr_base_and_unit_offset
                         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
                                     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
        {
          off = fold_binary_to_constant (MULT_EXPR, sizetype,
                                         off, addr.step);
          addr.step = NULL_TREE;
        }

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
                                TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
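
/* For example, a TARGET_MEM_REF whose index was turned into the constant
   3 by propagation and whose step is 4 is folded above by adding
   3 * 4 == 12 to the offset and clearing the index and step.  */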

/* Return the preferred index scale factor for accessing memory of mode
   MEM_MODE in the address space of pointer BASE.  Assume that we're
   optimizing for speed if SPEED is true and for size otherwise.  */
unsigned int
preferred_mem_scale_factor (tree base, machine_mode mem_mode,
                            bool speed)
{
  /* For BLKmode, we can't do anything so return 1.  */
  if (mem_mode == BLKmode)
    return 1;

  struct mem_address parts = {};
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
  unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode);

  /* Addressing mode "base + index".  */
  parts.index = integer_one_node;
  parts.base = integer_one_node;
  rtx addr = addr_for_mem_ref (&parts, as, false);
  unsigned cost = address_cost (addr, mem_mode, as, speed);

  /* Addressing mode "base + index << scale".  */
  parts.step = wide_int_to_tree (sizetype, fact);
  addr = addr_for_mem_ref (&parts, as, false);
  unsigned new_cost = address_cost (addr, mem_mode, as, speed);

  /* Compare the cost of an address with an unscaled index with
     a scaled index and return factor if useful.  */
  if (new_cost < cost)
    return GET_MODE_UNIT_SIZE (mem_mode);
  return 1;
}
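
/* For example, for DImode accesses on a target where [base + index*8]
   is cheaper than [base + index], the comparison above returns 8,
   telling the caller that scaling the index by the element size is
   profitable.  */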

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"
