1/* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
8Free Software Foundation; either version 3, or (at your option) any
9later version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "backend.h"
27#include "target.h"
28#include "rtl.h"
29#include "tree.h"
30#include "gimple.h"
31#include "memmodel.h"
32#include "stringpool.h"
33#include "tree-vrp.h"
34#include "tree-ssanames.h"
35#include "expmed.h"
36#include "insn-config.h"
37#include "emit-rtl.h"
38#include "recog.h"
39#include "tree-pretty-print.h"
40#include "fold-const.h"
41#include "stor-layout.h"
42#include "gimple-iterator.h"
43#include "gimplify-me.h"
44#include "tree-ssa-loop-ivopts.h"
45#include "expr.h"
46#include "tree-dfa.h"
47#include "dumpfile.h"
48#include "tree-affine.h"
49#include "gimplify.h"
50
51/* FIXME: We compute address costs using RTL. */
52#include "tree-ssa-address.h"
53
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
56
57 There are at least 5 different kinds of symbols that we can run up against:
58
59 (1) binds_local_p, small data area.
60 (2) binds_local_p, eg local statics
61 (3) !binds_local_p, eg global variables
62 (4) thread local, local_exec
63 (5) thread local, !local_exec
64
65 Now, (1) won't appear often in an array context, but it certainly can.
66 All you have to do is set -GN high enough, or explicitly mark any
67 random object __attribute__((section (".sdata"))).
68
69 All of these affect whether or not a symbol is in fact a valid address.
70 The only one tested here is (3). And that result may very well
71 be incorrect for (4) or (5).
72
73 An incorrect result here does not cause incorrect results out the
74 back end, because the expander in expr.c validizes the address. However
75 it would be nice to improve the handling here in order to produce more
76 precise results. */
77
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  One template is built per address "shape" (see TEMPL_IDX)
   and cached; STEP_P and OFF_P point into REF at the places where the actual
   step and offset constants are patched in each time the template is reused.
   The interior pointers are GTY((skip)) since they point into REF itself.  */

struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};
88
89
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

/* Map an address space and the presence (non-nullness) of the individual
   TARGET_MEM_REF components to an index into MEM_ADDR_TEMPLATE_LIST.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
103
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  Any of
   STEP_P/OFFSET_P may be NULL if the caller does not need the patch points
   (as when really expanding, rather than building a template).  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  /* Start with the scaled index part, INDEX * STEP.  */
  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  /* Remember where STEP landed so template users can patch it.  */
	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  /* Add the base register, if any.  */
  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  /* SYMBOL + OFFSET is a link-time constant; wrap it in CONST so
	     backends recognize it as such.  */
	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The whole address is just the offset; the patch point is the
	     address slot itself.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  /* An entirely empty address means address zero.  */
  if (!*addr)
    *addr = const0_rtx;
}
183
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  /* A step of one is implicit and thus not materialized.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      /* Sign-extend the offset to full offset_int width before emitting.  */
      offset_int dc = offset_int::from (wi::to_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  /* Build the template lazily: placeholder registers and a dummy
	     symbol — only the shape of the address matters for validity.  */
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      /* Patch the actual step/offset constants into the shared template.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  /* Pointers may be narrower/wider than addresses on some targets;
     convert the result to the address mode if they differ.  */
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
266
267/* implement addr_for_mem_ref() directly from a tree, which avoids exporting
268 the mem_address structure. */
269
270rtx
271addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
272{
273 struct mem_address addr;
274 get_address_description (exp, &addr);
275 return addr_for_mem_ref (&addr, as, really_expand);
276}
277
278/* Returns address of MEM_REF in TYPE. */
279
280tree
281tree_mem_ref_addr (tree type, tree mem_ref)
282{
283 tree addr;
284 tree act_elem;
285 tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
286 tree addr_base = NULL_TREE, addr_off = NULL_TREE;
287
288 addr_base = fold_convert (type, TMR_BASE (mem_ref));
289
290 act_elem = TMR_INDEX (mem_ref);
291 if (act_elem)
292 {
293 if (step)
294 act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
295 act_elem, step);
296 addr_off = act_elem;
297 }
298
299 act_elem = TMR_INDEX2 (mem_ref);
300 if (act_elem)
301 {
302 if (addr_off)
303 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
304 addr_off, act_elem);
305 else
306 addr_off = act_elem;
307 }
308
309 if (offset && !integer_zerop (offset))
310 {
311 if (addr_off)
312 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
313 fold_convert (TREE_TYPE (addr_off), offset));
314 else
315 addr_off = offset;
316 }
317
318 if (addr_off)
319 addr = fold_build_pointer_plus (addr_base, addr_off);
320 else
321 addr = addr_base;
322
323 return addr;
324}
325
326/* Returns true if a memory reference in MODE and with parameters given by
327 ADDR is valid on the current target. */
328
329bool
330valid_mem_ref_p (machine_mode mode, addr_space_t as,
331 struct mem_address *addr)
332{
333 rtx address;
334
335 address = addr_for_mem_ref (addr, as, false);
336 if (!address)
337 return false;
338
339 return memory_address_addr_space_p (mode, address, as);
340}
341
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.

   Note that *ADDR is canonicalized in place: a unit step is dropped and
   the offset is (re)built in ALIAS_PTR_TYPE.  Returns NULL_TREE when
   verification fails.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  /* A step of one is implicit; drop it.  */
  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  /* TARGET_MEM_REF always carries an offset operand, whose type encodes
     the alias information.  */
  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* Distribute symbol/base between operand 0 (base) and operand 4 (index2):
     a symbol must go in the base slot.  */
  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      /* No pointer-typed base; use a zero pointer and push the base into
	 index2.  */
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
393
394/* Returns true if OBJ is an object whose address is a link time constant. */
395
396static bool
397fixed_address_object_p (tree obj)
398{
399 return (VAR_P (obj)
400 && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
401 && ! DECL_DLLIMPORT_P (obj));
402}
403
404/* If ADDR contains an address of object that is a link time constant,
405 move it to PARTS->symbol. */
406
407void
408move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
409{
410 unsigned i;
411 tree val = NULL_TREE;
412
413 for (i = 0; i < addr->n; i++)
414 {
415 if (addr->elts[i].coef != 1)
416 continue;
417
418 val = addr->elts[i].val;
419 if (TREE_CODE (val) == ADDR_EXPR
420 && fixed_address_object_p (TREE_OPERAND (val, 0)))
421 break;
422 }
423
424 if (i == addr->n)
425 return;
426
427 parts->symbol = val;
428 aff_combination_remove_elt (addr, i);
429}
430
431/* Return true if ADDR contains an instance of BASE_HINT and it's moved to
432 PARTS->base. */
433
434static bool
435move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
436 aff_tree *addr)
437{
438 unsigned i;
439 tree val = NULL_TREE;
440 int qual;
441
442 for (i = 0; i < addr->n; i++)
443 {
444 if (addr->elts[i].coef != 1)
445 continue;
446
447 val = addr->elts[i].val;
448 if (operand_equal_p (val, base_hint, 0))
449 break;
450 }
451
452 if (i == addr->n)
453 return false;
454
455 /* Cast value to appropriate pointer type. We cannot use a pointer
456 to TYPE directly, as the back-end will assume registers of pointer
457 type are aligned, and just the base itself may not actually be.
458 We use void pointer to the type's address space instead. */
459 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
460 type = build_qualified_type (void_type_node, qual);
461 parts->base = fold_convert (build_pointer_type (type), val);
462 aff_combination_remove_elt (addr, i);
463 return true;
464}
465
466/* If ADDR contains an address of a dereferenced pointer, move it to
467 PARTS->base. */
468
469static void
470move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
471{
472 unsigned i;
473 tree val = NULL_TREE;
474
475 for (i = 0; i < addr->n; i++)
476 {
477 if (addr->elts[i].coef != 1)
478 continue;
479
480 val = addr->elts[i].val;
481 if (POINTER_TYPE_P (TREE_TYPE (val)))
482 break;
483 }
484
485 if (i == addr->n)
486 return;
487
488 parts->base = val;
489 aff_combination_remove_elt (addr, i);
490}
491
492/* Moves the loop variant part V in linear address ADDR to be the index
493 of PARTS. */
494
495static void
496move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
497{
498 unsigned i;
499 tree val = NULL_TREE;
500
501 gcc_assert (!parts->index);
502 for (i = 0; i < addr->n; i++)
503 {
504 val = addr->elts[i].val;
505 if (operand_equal_p (val, v, 0))
506 break;
507 }
508
509 if (i == addr->n)
510 return;
511
512 parts->index = fold_convert (sizetype, val);
513 parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
514 aff_combination_remove_elt (addr, i);
515}
516
517/* Adds ELT to PARTS. */
518
519static void
520add_to_parts (struct mem_address *parts, tree elt)
521{
522 tree type;
523
524 if (!parts->index)
525 {
526 parts->index = fold_convert (sizetype, elt);
527 return;
528 }
529
530 if (!parts->base)
531 {
532 parts->base = elt;
533 return;
534 }
535
536 /* Add ELT to base. */
537 type = TREE_TYPE (parts->base);
538 if (POINTER_TYPE_P (type))
539 parts->base = fold_build_pointer_plus (parts->base, elt);
540 else
541 parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
542}
543
/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  Results are probed from the target once per (AS, MODE) pair
   and cached for the rest of the compilation.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
				 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  /* Lazily compute the bitmap of allowed multipliers in
     [-MAX_RATIO, MAX_RATIO] for this (mode, address space) pair.  */
  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      /* Build REG1 * <placeholder> + REG2 once and patch each candidate
	 ratio into the multiplier slot.  */
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	{
	  XEXP (scaled, 1) = gen_int_mode (i, address_mode);
	  /* Accept the ratio if either REG1*i + REG2 or plain REG1*i is a
	     valid address.  */
	  if (memory_address_addr_space_p (mode, addr, as)
	      || memory_address_addr_space_p (mode, scaled, as))
	    bitmap_set_bit (valid_mult, i + MAX_RATIO);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  allowed multipliers:");
	  for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	    if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
	      fprintf (dump_file, " %d", (int) i);
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "\n");
	}

      valid_mult_list[data_index] = valid_mult;
    }

  /* Ratios outside the probed range are conservatively rejected.  */
  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
599
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS.  Elements whose coefficient is the negation of the
   chosen multiplier are subtracted from the collected index.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  /* Pass 1: pick the coefficient whose explicit multiplication would be
     most expensive, among those the addressing mode can absorb.  */
  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      /* Elements with the negated coefficient are subtracted; anything
	 else stays in ADDR, compacting the array in place via J.  */
      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  /* The collected sum becomes the index, scaled by best_mult as step.  */
  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
669
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Store true to VAR_IN_BASE if variant
   part of address is split to PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
	       struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  /* The constant term of the affine combination becomes the offset.  */
  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to know how to
     distinguish between pointer and its offset, we decide if var
     part is the pointer based on guess.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  /* Any non-affine rest of the combination is added in as well.  */
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
732
/* Force the PARTS to register.  The base must satisfy
   is_gimple_mem_ref_addr (an invariant address or SSA name) while the
   index only needs to be a gimple value; any statements computing them
   are inserted before GSI.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
747
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the mode
   of created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.

   The address is first split into parts; if the resulting combination is
   not supported by the target, it is simplified step by step (merging
   symbol, step, offset and index into the base) until a valid memory
   reference can be built.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p (sizetype,
						 TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      /* Add the symbol to base, forcing the result into a
		 register if necessary.  */
	      tmp = fold_build_pointer_plus (tmp, parts.base);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Move base to index, then move the symbol to base.  */
	      parts.index = parts.base;
	    }
	  parts.base = tmp;
	}
      else
	parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
					      fold_build2 (MULT_EXPR, sizetype,
							   parts.index, parts.step),
					      true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Keep unshared copies so the parts can be restored if this
	 attempt fails.  */
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
	{
	  if (parts.base)
	    {
	      tmp = fold_build_pointer_plus (parts.base, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.base = tmp;
	}
      else
	{
	  if (parts.index)
	    {
	      tmp = fold_build_pointer_plus (parts.index, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.index = tmp;
	}

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
	 [base + offset] addressing mode is supported in next step.
	 This is necessary for targets only support [base + offset],
	 but not [base + index] addressing mode.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
934
935/* Copies components of the address from OP to ADDR. */
936
937void
938get_address_description (tree op, struct mem_address *addr)
939{
940 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
941 {
942 addr->symbol = TMR_BASE (op);
943 addr->base = TMR_INDEX2 (op);
944 }
945 else
946 {
947 addr->symbol = NULL_TREE;
948 if (TMR_INDEX2 (op))
949 {
950 gcc_assert (integer_zerop (TMR_BASE (op)));
951 addr->base = TMR_INDEX2 (op);
952 }
953 else
954 addr->base = TMR_BASE (op);
955 }
956 addr->index = TMR_INDEX (op);
957 addr->step = TMR_STEP (op);
958 addr->offset = TMR_OFFSET (op);
959}
960
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  This transfers
   the side-effect and volatility flags and, where provably safe, the
   points-to and alignment information of the base pointer.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  Only do so if the new base does not
     already carry pointer info.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.
	     A TARGET_MEM_REF with a variable index of unknown or
	     insufficiently aligned step may address less-aligned memory.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: Below conditions can be relaxed if TMR_INDEX
			  is an induction variable and its initial value and
			  step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      /* Both references are MEM_REF-like with constant offsets;
		 adjust the recorded misalignment by their difference.  */
	      unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
				  - mem_ref_offset (new_ref).to_short_addr ());
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  /* A decl base gives an exact points-to singleton.  */
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
1026
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  /* A constant base is really just an addend to the offset.  */
  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  /* If the symbol is of the form &MEM[ptr + cst], strip the MEM_REF and
     fold the constant into the offset.  */
  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      /* &a.b.c -> &a plus the constant byte offset of the components.  */
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  /* A constant index contributes INDEX * STEP to the offset.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     if it is valid in this form on the target - the propagation result
     wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
1102
1103/* Dump PARTS to FILE. */
1104
1105extern void dump_mem_address (FILE *, struct mem_address *);
1106void
1107dump_mem_address (FILE *file, struct mem_address *parts)
1108{
1109 if (parts->symbol)
1110 {
1111 fprintf (file, "symbol: ");
1112 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
1113 fprintf (file, "\n");
1114 }
1115 if (parts->base)
1116 {
1117 fprintf (file, "base: ");
1118 print_generic_expr (file, parts->base, TDF_SLIM);
1119 fprintf (file, "\n");
1120 }
1121 if (parts->index)
1122 {
1123 fprintf (file, "index: ");
1124 print_generic_expr (file, parts->index, TDF_SLIM);
1125 fprintf (file, "\n");
1126 }
1127 if (parts->step)
1128 {
1129 fprintf (file, "step: ");
1130 print_generic_expr (file, parts->step, TDF_SLIM);
1131 fprintf (file, "\n");
1132 }
1133 if (parts->offset)
1134 {
1135 fprintf (file, "offset: ");
1136 print_generic_expr (file, parts->offset, TDF_SLIM);
1137 fprintf (file, "\n");
1138 }
1139}
1140
1141#include "gt-tree-ssa-address.h"
1142