/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "attribs.h"
#include "builtins.h"
#include "gimplify-me.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

struct GTY(()) object_size
{
  /* Estimate of bytes till the end of the object.  */
  tree size;
  /* Estimate of the size of the whole object.  */
  tree wholesize;
};

static tree compute_object_offset (tree, const_tree);
static bool addr_object_size (struct object_size_info *,
			      const_tree, int, tree *, tree *t = NULL);
static tree alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is upper bound for the object size and number of bytes till
   the end of the object.
   object_sizes[1] is upper bound for the object size and number of bytes till
   the end of the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for the object size and number of bytes till
   the end of the object and object_sizes[3] lower bound for subobject.

   For static object sizes, the object size and the bytes till the end of the
   object are both INTEGER_CST.  In the dynamic case, they are finally either a
   gimple variable or an INTEGER_CST.  */
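/* As an illustration, given (a hypothetical declaration)

     struct A { char buf[4]; char tail[6]; } a;

   __builtin_object_size (&a.buf[1], 0) is 9, the bytes remaining in the
   whole object A, while __builtin_object_size (&a.buf[1], 1) is 3, the
   bytes remaining in the subobject a.buf.  */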
static vec<object_size> object_sizes[OST_END];

/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[OST_END];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;

/* Tell the generic SSA updater what kind of update is needed after the pass
   executes.  */
static unsigned todo;

/* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE.  */

static inline bool
size_initval_p (tree val, int object_size_type)
{
  return ((object_size_type & OST_MINIMUM)
	  ? integer_all_onesp (val) : integer_zerop (val));
}

/* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE.  */

static inline bool
size_unknown_p (tree val, int object_size_type)
{
  return ((object_size_type & OST_MINIMUM)
	  ? integer_zerop (val) : integer_all_onesp (val));
}

/* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE.  */

static inline bool
size_valid_p (tree val, int object_size_type)
{
  return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
}

/* Return true if VAL is usable as an object size in the object_sizes
   vectors.  */

static inline bool
size_usable_p (tree val)
{
  return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
}

/* Return a tree with initial value for OBJECT_SIZE_TYPE.  */

static inline tree
size_initval (int object_size_type)
{
  return ((object_size_type & OST_MINIMUM)
	  ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
}

/* Return a tree with unknown value for OBJECT_SIZE_TYPE.  */

static inline tree
size_unknown (int object_size_type)
{
  return ((object_size_type & OST_MINIMUM)
	  ? size_zero_node : TYPE_MAX_VALUE (sizetype));
}
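
/* Note the duality above: for maximum object sizes the computation starts
   at 0 and an unknown size saturates to TYPE_MAX_VALUE (sizetype), while
   for OST_MINIMUM it starts at TYPE_MAX_VALUE (sizetype) and an unknown
   size saturates to 0.  */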

/* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names.  */

static inline void
object_sizes_grow (int object_size_type)
{
  if (num_ssa_names > object_sizes[object_size_type].length ())
    object_sizes[object_size_type].safe_grow (num_ssa_names, true);
}

/* Release object_sizes[OBJECT_SIZE_TYPE].  */

static inline void
object_sizes_release (int object_size_type)
{
  object_sizes[object_size_type].release ();
}

/* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown.  */

static inline bool
object_sizes_unknown_p (int object_size_type, unsigned varno)
{
  return size_unknown_p (object_sizes[object_size_type][varno].size,
			 object_size_type);
}

/* Return the raw size expression for VARNO corresponding to OSI.  This returns
   the TREE_VEC as is and should only be used during gimplification.  */

static inline object_size
object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
{
  gcc_assert (osi->pass != 0);
  return object_sizes[osi->object_size_type][varno];
}

/* Return a size tree for VARNO corresponding to OSI.  If WHOLE is true, return
   the whole object size.  Use this for building size expressions based on size
   of VARNO.  */

static inline tree
object_sizes_get (struct object_size_info *osi, unsigned varno,
		  bool whole = false)
{
  tree ret;
  int object_size_type = osi->object_size_type;

  if (whole)
    ret = object_sizes[object_size_type][varno].wholesize;
  else
    ret = object_sizes[object_size_type][varno].size;

  if (object_size_type & OST_DYNAMIC)
    {
      if (TREE_CODE (ret) == MODIFY_EXPR)
	return TREE_OPERAND (ret, 0);
      else if (TREE_CODE (ret) == TREE_VEC)
	return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
      else
	gcc_checking_assert (size_usable_p (ret));
    }

  return ret;
}

/* Set size for VARNO corresponding to OSI to VAL.  */

static inline void
object_sizes_initialize (struct object_size_info *osi, unsigned varno,
			 tree val, tree wholeval)
{
  int object_size_type = osi->object_size_type;

  object_sizes[object_size_type][varno].size = val;
  object_sizes[object_size_type][varno].wholesize = wholeval;
}

/* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type.  The
   TREE_VEC is returned only in case of PHI nodes.  */

static tree
bundle_sizes (tree name, tree expr)
{
  gcc_checking_assert (TREE_TYPE (name) == sizetype);

  if (TREE_CODE (expr) == TREE_VEC)
    {
      TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
      return expr;
    }

  gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
  return build2 (MODIFY_EXPR, sizetype, name, expr);
}

/* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
   maximum.  For static sizes, each element of TREE_VEC is always INTEGER_CST
   throughout the computation.  For dynamic sizes, each element may either be a
   gimple variable, a MODIFY_EXPR or a TREE_VEC.  The MODIFY_EXPR is for
   expressions that need to be gimplified.  TREE_VECs are special, they're
   emitted only for GIMPLE_PHI and the PHI result variable is the last element
   of the vector.  */

static bool
object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
		  tree wholeval)
{
  int object_size_type = osi->object_size_type;
  object_size osize = object_sizes[object_size_type][varno];
  bool changed = true;

  tree oldval = osize.size;
  tree old_wholeval = osize.wholesize;

  if (object_size_type & OST_DYNAMIC)
    {
      if (bitmap_bit_p (osi->reexamine, varno))
	{
	  val = bundle_sizes (oldval, val);
	  wholeval = bundle_sizes (old_wholeval, wholeval);
	}
      else
	{
	  gcc_checking_assert (size_initval_p (oldval, object_size_type));
	  gcc_checking_assert (size_initval_p (old_wholeval,
					       object_size_type));
	  /* For dynamic object sizes, all object sizes that are not gimple
	     variables will need to be gimplified.  */
	  if (wholeval != val && !size_usable_p (wholeval))
	    {
	      bitmap_set_bit (osi->reexamine, varno);
	      wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
	    }
	  if (!size_usable_p (val))
	    {
	      bitmap_set_bit (osi->reexamine, varno);
	      tree newval = bundle_sizes (make_ssa_name (sizetype), val);
	      if (val == wholeval)
		wholeval = newval;
	      val = newval;
	    }
	  /* If the new value is a temporary variable, mark it for
	     reexamination.  */
	  else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
	    bitmap_set_bit (osi->reexamine, varno);
	}
    }
  else
    {
      enum tree_code code = (object_size_type & OST_MINIMUM
			     ? MIN_EXPR : MAX_EXPR);

      val = size_binop (code, val, oldval);
      wholeval = size_binop (code, wholeval, old_wholeval);
      changed = (tree_int_cst_compare (val, oldval) != 0
		 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
    }

  object_sizes[object_size_type][varno].size = val;
  object_sizes[object_size_type][varno].wholesize = wholeval;

  return changed;
}

/* Set temporary SSA names for object size and whole size to resolve dependency
   loops in dynamic size computation.  */

static inline void
object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
{
  tree val = object_sizes_get (osi, varno);

  if (size_initval_p (val, osi->object_size_type))
    object_sizes_set (osi, varno,
		      make_ssa_name (sizetype),
		      make_ssa_name (sizetype));
}

/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}

/* Return the number of bytes remaining in an object of size SZ when viewed
   from byte offset OFFSET.  If WHOLESIZE is not NULL_TREE, use it to get the
   net offset of the pointer, which should always be positive and hence, be
   within OFFSET_LIMIT for valid offsets.  */

static tree
size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
{
  gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));

  /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
     offset from the whole object.  */
  if (wholesize && wholesize != sz
      && (TREE_CODE (sz) != INTEGER_CST
	  || TREE_CODE (wholesize) != INTEGER_CST
	  || tree_int_cst_compare (sz, wholesize)))
    {
      gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
					       sizetype));

      /* Restructure SZ - OFFSET as
	 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
	 WHOLESIZE + OFFSET - SZ is only allowed to be positive.  */
      tree tmp = size_binop (MAX_EXPR, wholesize, sz);
      offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
      offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
      sz = tmp;
    }
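
  /* As a worked illustration of the restructuring above: with WHOLESIZE = 10
     and SZ = 5 (the pointer is 5 bytes into the object), an OFFSET of
     (sizetype) -2 gives a net offset of 10 + (-2) - 5 = 3, and the final
     result below is 10 - 3 = 7 remaining bytes.  */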

  /* Safe to convert now, since a valid net offset should be non-negative.  */
  if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
    offset = fold_convert (sizetype, offset);

  if (TREE_CODE (offset) == INTEGER_CST)
    {
      if (integer_zerop (offset))
	return sz;

      /* Negative or too large offset even after adjustment, cannot be within
	 bounds of an object.  */
      if (compare_tree_int (offset, offset_limit) > 0)
	return size_zero_node;
    }

  return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
}

/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */
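
/* For instance (a hypothetical layout), if EXPR is a.buf[3] and VAR is a,
   where the field buf sits at byte offset 4 and has 1-byte elements, the
   result is 4 + 3 = 7.  */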

static tree
compute_object_offset (tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR,
			component_ref_field_offset (expr),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}

/* Returns the size of the object designated by DECL considering its
   initializer if it either has one or if it would not affect its size,
   otherwise the size of the object without the initializer when MIN
   is true, else null.  An object's initializer affects the object's
   size if it's a struct type with a flexible array member.  */
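
/* For example (initializing a flexible array member is a GNU extension),
   given

     struct S { int n; char data[]; } s = { 0, { 'a', 'b', 'c' } };

   the size of s is the byte position of s.data plus the 3 initializer
   bytes, rather than plain sizeof (struct S).  */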

tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  tree last_type = TREE_TYPE (last);
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, set *PSIZE to size_unknown (object_size_type) and
   return false.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type, tree *psize, tree *pwholesize)
{
  tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
  tree var_size, bytes, wholebytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = size_unknown (object_size_type);
  if (pwholesize)
    *pwholesize = size_unknown (object_size_type);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (!pt_var)
    return false;

  if (TREE_CODE (pt_var) == MEM_REF)
    {
      tree sz, wholesize;

      if (!osi || (object_size_type & OST_SUBOBJECT) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
				       object_size_type & ~OST_SUBOBJECT, &sz);
	  wholesize = sz;
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    {
	      sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
	      wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
	    }
	  else
	    sz = wholesize = size_unknown (object_size_type);
	}
      if (!size_unknown_p (sz, object_size_type))
	sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);

      if (!size_unknown_p (sz, object_size_type)
	  && (TREE_CODE (sz) != INTEGER_CST
	      || compare_tree_int (sz, offset_limit) < 0))
	{
	  pt_var_size = sz;
	  pt_var_wholesize = wholesize;
	}
    }
  else if (DECL_P (pt_var))
    {
      pt_var_size = pt_var_wholesize
	= decl_init_size (pt_var, object_size_type & OST_MINIMUM);
      if (!pt_var_size)
	return false;
    }
  else if (TREE_CODE (pt_var) == STRING_CST)
    pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var_size)
    {
      /* Validate the size determined above if it is a constant.  */
      if (TREE_CODE (pt_var_size) == INTEGER_CST
	  && compare_tree_int (pt_var_size, offset_limit) >= 0)
	return false;
    }

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & OST_SUBOBJECT)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size if fld isn't a flexible array
		 member.  */
	      bool is_flexible_array_mem_ref = false;
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain && TYPE_MAX_VALUE (domain))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    /* When the ref is not to an aggregate type, i.e., an
		       array, a record or a union, it cannot have a flexible
		       size; compute the object size directly.  */
		    if (!AGGREGATE_TYPE_P (TREE_TYPE (v)))
		      {
			v = NULL_TREE;
			break;
		      }
		    /* If the ref is to a record or union type, but the type
		       does not include a flexible array recursively, compute
		       the object size directly.  */
		    if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (v)))
		      {
			if (!TYPE_INCLUDES_FLEXARRAY (TREE_TYPE (v)))
			  {
			    v = NULL_TREE;
			    break;
			  }
			else
			  {
			    v = TREE_OPERAND (v, 0);
			    break;
			  }
		      }
		    /* Now the ref is to an array type.  */
		    gcc_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
		    is_flexible_array_mem_ref = array_ref_flexible_size_p (v);
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Compute object size only if v is not a
			   flexible array member.  */
			if (!is_flexible_array_mem_ref)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	{
	  var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
	  if (!TREE_CONSTANT (var_size))
	    var_size = get_or_create_ssa_default_def (cfun, var_size);
	  if (!var_size)
	    return false;
	}
      else if (!pt_var_size)
	return false;
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  bytes = size_for_offset (var_size, bytes);
	  if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
						   pt_var);
	      if (bytes2 != error_mark_node)
		{
		  bytes2 = size_for_offset (pt_var_size, bytes2);
		  bytes = size_binop (MIN_EXPR, bytes, bytes2);
		}
	    }
	}
      else
	bytes = size_unknown (object_size_type);

      wholebytes
	= object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
    }
  else if (!pt_var_size)
    return false;
  else
    {
      bytes = pt_var_size;
      wholebytes = pt_var_wholesize;
    }

  if (!size_unknown_p (bytes, object_size_type)
      && size_valid_p (bytes, object_size_type)
      && !size_unknown_p (wholebytes, object_size_type)
      && size_valid_p (wholebytes, object_size_type))
    {
      *psize = bytes;
      if (pwholesize)
	*pwholesize = wholebytes;
      return true;
    }

  return false;
}


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return size_unknown (object_size_type).  */
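
/* For instance, given the hypothetical declaration

     void *my_calloc (size_t n, size_t sz)
	  __attribute__ ((alloc_size (1, 2)));

   the size of an object returned by my_calloc (n, sz) is computed below
   as n * sz.  */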

static tree
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  tree callfn = gimple_call_fndecl (call);
  if (callfn)
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return size_unknown (object_size_type);

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
				      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }
  else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	   && callfn
	   && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
    arg1 = 0;

  /* Non-const arguments are OK here, let the caller handle constness.  */
  if (arg1 < 0
      || (unsigned) arg1 >= gimple_call_num_args (call)
      || (arg2 >= 0 && (unsigned) arg2 >= gimple_call_num_args (call)))
    return size_unknown (object_size_type);

  tree targ1 = gimple_call_arg (call, arg1);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (targ1))
      || TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (sizetype))
    return size_unknown (object_size_type);
  targ1 = fold_convert (sizetype, targ1);
  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    {
      tree targ2 = gimple_call_arg (call, arg2);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (targ2))
	  || TYPE_PRECISION (TREE_TYPE (targ2)) > TYPE_PRECISION (sizetype))
	return size_unknown (object_size_type);
      targ2 = fold_convert (sizetype, targ2);
      bytes = size_binop (MULT_EXPR, targ1, targ2);
    }
  else
    bytes = targ1;

  return bytes ? bytes : size_unknown (object_size_type);
}

/* Compute __builtin_object_size for CALL, which is a call to either
   BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return size_unknown (object_size_type).  */
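
/* For example, strndup (s, n) writes at most n + 1 bytes, so n + 1 caps the
   estimate via the MIN_EXPR at the end, while strdup (s) is modeled as
   strlen (s) + 1.  */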

static tree
strdup_object_size (const gcall *call, int object_size_type, bool is_strndup)
{
  tree src = gimple_call_arg (call, 0);
  tree sz = size_unknown (object_size_type);
  tree n = NULL_TREE;

  if (is_strndup)
    n = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
		     gimple_call_arg (call, 1));
  /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
     way it likes.  */
  else
    {
      tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
      if (strlen_fn)
	{
	  sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
			    build_call_expr (strlen_fn, 1, src));
	  todo = TODO_update_ssa_only_virtuals;
	}
    }

  /* In all other cases, return the size of SRC since the object size cannot
     exceed that.  We cannot do this for OST_MINIMUM unless SRC points into a
     string constant since otherwise the object size could go all the way down
     to zero.  */
  if (!size_valid_p (sz, object_size_type)
      || size_unknown_p (sz, object_size_type))
    {
      tree wholesrc = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR)
	wholesrc = get_base_address (TREE_OPERAND (src, 0));

      /* If the source points within a string constant, we try to get its
	 length.  */
      if (wholesrc && TREE_CODE (wholesrc) == STRING_CST)
	{
	  tree len = c_strlen (src, 0);
	  if (len)
	    sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node, len);
	}

      /* For maximum estimate, our next best guess is the object size of the
	 source.  */
      if (size_unknown_p (sz, object_size_type)
	  && !(object_size_type & OST_MINIMUM))
	compute_builtin_object_size (src, object_size_type, &sz);
    }

  /* String duplication allocates at least one byte, so we should never fail
     for OST_MINIMUM.  */
  if ((!size_valid_p (sz, object_size_type)
       || size_unknown_p (sz, object_size_type))
      && (object_size_type & OST_MINIMUM))
    sz = size_one_node;

  /* Factor in the N.  */
  return n ? fold_build2 (MIN_EXPR, sizetype, n, sz) : sz;
}

/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */
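
/* For example, memcpy and strcpy return their first argument, which the
   return flags encode as ERF_RETURNS_ARG with argument number 0; the object
   size of such a call's result is therefore that of the argument.  */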

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
	return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}

/* Emit PHI nodes for the size expressions of STMT, a GIMPLE_PHI, given the
   SIZE and WHOLESIZE TREE_VECs built for it.  */

static void
emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
{
  tree phires;
  gphi *wholephi = NULL;

  if (wholesize != size)
    {
      phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
      wholephi = create_phi_node (phires, gimple_bb (stmt));
    }

  phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
  gphi *phi = create_phi_node (phires, gimple_bb (stmt));
  gphi *obj_phi = as_a <gphi *> (stmt);

  gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
  gcc_checking_assert (TREE_CODE (size) == TREE_VEC);

  for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
    {
      gimple_seq seq = NULL;
      tree wsz = TREE_VEC_ELT (wholesize, i);
      tree sz = TREE_VEC_ELT (size, i);

      /* If we built an expression, we will need to build statements
	 and insert them on the edge right away.  */
      if (TREE_CODE (wsz) != SSA_NAME)
	wsz = force_gimple_operand (wsz, &seq, true, NULL);
      if (TREE_CODE (sz) != SSA_NAME)
	{
	  gimple_seq s;
	  sz = force_gimple_operand (sz, &s, true, NULL);
	  gimple_seq_add_seq (&seq, s);
	}

      if (seq)
	gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);

      if (wholephi)
	add_phi_arg (wholephi, wsz,
		     gimple_phi_arg_edge (obj_phi, i),
		     gimple_phi_arg_location (obj_phi, i));

      add_phi_arg (phi, sz,
		   gimple_phi_arg_edge (obj_phi, i),
		   gimple_phi_arg_location (obj_phi, i));
    }
}

/* Descend through EXPR and return size_unknown if it uses any SSA variable
   generated by object_size_set or object_size_set_temp that turned out to
   have an unknown size, as noted in UNKNOWNS.  */

static tree
propagate_unknowns (object_size_info *osi, tree expr, bitmap unknowns)
{
  int object_size_type = osi->object_size_type;

  switch (TREE_CODE (expr))
    {
    case SSA_NAME:
      if (bitmap_bit_p (unknowns, SSA_NAME_VERSION (expr)))
	return size_unknown (object_size_type);
      return expr;

    case MIN_EXPR:
    case MAX_EXPR:
      {
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	res = propagate_unknowns (osi, TREE_OPERAND (expr, 1), unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    case MODIFY_EXPR:
      {
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;
	return expr;
      }
    case TREE_VEC:
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	{
	  tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i),
					 unknowns);
	  if (size_unknown_p (res, object_size_type))
	    return res;
	}
      return expr;
    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    default:
      return expr;
    }
}

/* Walk through size expressions that need reexamination and generate
   statements for them.  */

static void
gimplify_size_expressions (object_size_info *osi)
{
  int object_size_type = osi->object_size_type;
  bitmap_iterator bi;
  unsigned int i;
  bool changed;

  /* Step 1: Propagate unknowns into expressions.  */
  bitmap reexamine = BITMAP_ALLOC (NULL);
  bitmap_copy (reexamine, osi->reexamine);
  bitmap unknowns = BITMAP_ALLOC (NULL);
  do
    {
      changed = false;
      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
	{
	  object_size cur = object_sizes_get_raw (osi, i);

	  if (size_unknown_p (propagate_unknowns (osi, cur.size, unknowns),
			      object_size_type)
	      || size_unknown_p (propagate_unknowns (osi, cur.wholesize,
						     unknowns),
				 object_size_type))
	    {
	      /* Record the SSAs we're overwriting to propagate the
		 unknowns.  */
	      tree oldval = object_sizes_get (osi, i);
	      tree old_wholeval = object_sizes_get (osi, i, true);

	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (oldval));
	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (old_wholeval));
	      object_sizes_initialize (osi, i,
				       size_unknown (object_size_type),
				       size_unknown (object_size_type));
	      bitmap_clear_bit (osi->reexamine, i);
	      changed = true;
	    }
	}
      bitmap_copy (reexamine, osi->reexamine);
    }
  while (changed);

  /* Release all unknowns.  */
  EXECUTE_IF_SET_IN_BITMAP (unknowns, 0, i, bi)
    release_ssa_name (ssa_name (i));

  BITMAP_FREE (unknowns);
  BITMAP_FREE (reexamine);

  /* Expand all size expressions to put their definitions close to the objects
     for which size is being computed.  */
  EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
    {
      gimple_seq seq = NULL;
      object_size osize = object_sizes_get_raw (osi, i);

      gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
      enum gimple_code code = gimple_code (stmt);

      /* PHI nodes need special attention.  */
      if (code == GIMPLE_PHI)
	emit_phi_nodes (stmt, osize.size, osize.wholesize);
      else
	{
	  tree size_expr = NULL_TREE;

	  /* Bundle wholesize in with the size to gimplify if needed.  */
	  if (osize.wholesize != osize.size
	      && !size_usable_p (osize.wholesize))
	    size_expr = size_binop (COMPOUND_EXPR,
				    osize.wholesize,
				    osize.size);
	  else if (!size_usable_p (osize.size))
	    size_expr = osize.size;

	  if (size_expr)
	    {
	      gimple_stmt_iterator gsi;
	      if (code == GIMPLE_NOP)
		gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	      else
		gsi = gsi_for_stmt (stmt);

	      force_gimple_operand (size_expr, &seq, true, NULL);
	      gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
	    }
	}

      /* We're done, so replace the MODIFY_EXPRs with the SSA names.  */
      object_sizes_initialize (osi, i,
			       object_sizes_get (osi, i),
			       object_sizes_get (osi, i, true));
    }
}

/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.  Returns true on success and false when
   the object size could not be determined.  */
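
/* For instance, for

     char buf[10];
     char *p = &buf[4];

   a call with PTR p and OBJECT_SIZE_TYPE 0 stores 6 in *PSIZE.  */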

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     tree *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type < OST_END);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = size_unknown (object_size_type);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & OST_SUBOBJECT)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (((object_size_type & OST_DYNAMIC)
		   || (tree_fits_shwi_p (offset)
		       && compare_tree_int (offset, offset_limit) <= 0))
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  *psize = size_for_offset (*psize, offset);
		  return true;
		}
	    }
	}
      return false;
    }

  struct object_size_info osi;
  osi.object_size_type = object_size_type;
  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      bitmap_iterator bi;
      unsigned int i;

      object_sizes_grow (object_size_type);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %s%sobject size for ",
		   (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
		   (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
		   (object_size_type & OST_SUBOBJECT) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);

      if (!(object_size_type & OST_DYNAMIC))
	{
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;
	}

      /* First pass: walk UD chains, compute object sizes that can be computed.
	 osi.reexamine bitmap at the end will contain versions of SSA_NAMES
	 that need to be reexamined.  For both static and dynamic size
	 computation, reexamination is for propagation across dependency loops.
	 The dynamic case has the additional use case where the computed
	 expression needs to be gimplified.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      if (object_size_type & OST_DYNAMIC)
	{
	  osi.pass = 1;
	  gimplify_size_expressions (&osi);
	  bitmap_clear (osi.reexamine);
	}

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & OST_MINIMUM)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (!object_sizes_unknown_p (object_size_type, i))
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %s%sobject size ",
			 ((object_size_type & OST_MINIMUM) ? "minimum"
			  : "maximum"),
			 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
			 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
		print_generic_expr (dump_file, object_sizes_get (&osi, i),
				    dump_flags);
		fprintf (dump_file, "\n");
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
  return !size_unknown_p (*psize, object_size_type);
}

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  tree bytes, wholesize;

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
  else
    bytes = wholesize = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, wholesize);
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  tree bytes = NULL_TREE;

  gcc_assert (is_gimple_call (call));

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);

  bool is_strdup = gimple_call_builtin_p (call, BUILT_IN_STRDUP);
  bool is_strndup = gimple_call_builtin_p (call, BUILT_IN_STRNDUP);
  if (is_strdup || is_strndup)
    bytes = strdup_object_size (call, object_size_type, is_strndup);
  else
    bytes = alloc_object_size (call, object_size_type);

  if (!size_valid_p (bytes, object_size_type))
    bytes = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, bytes);
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);

  gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_checking_assert (osi->pass == 0);
  tree bytes = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, bytes);
}


/* Merge object sizes of ORIG into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  tree orig_bytes, wholesize;

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);

  if (object_sizes_set (osi, varno, orig_bytes, wholesize))
    osi->changed = true;

  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */
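
/* For example, if P has object size 10 and whole size 10, then for

     Q = P + 4;

   Q's object size becomes 10 - 4 = 6 via size_for_offset below.  */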

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  tree bytes, wholesize;
  tree op0, op1;
  bool reexamine = false;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* Handle PTR + OFFSET here.  */
  if (size_valid_p (op1, object_size_type)
      && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, op0);

	  bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
	  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
	  reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
	}
      else
	{
	  /* op0 will be ADDR_EXPR here.  We should never come here during
	     reexamination.  */
	  gcc_checking_assert (osi->pass == 0);
	  addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
	}

      /* size_for_offset doesn't make sense for -1 size, but it does for size 0
	 since the wholesize could be non-zero and a negative offset could give
	 a non-zero size.  */
      if (size_unknown_p (bytes, 0))
	;
      else if ((object_size_type & OST_DYNAMIC)
	       || compare_tree_int (op1, offset_limit) <= 0)
	bytes = size_for_offset (bytes, op1, wholesize);
      /* In the static case, with a negative offset, the best estimate for
	 minimum size is size_unknown but for maximum size, the wholesize is a
	 better estimate than size_unknown.  */
      else if (object_size_type & OST_MINIMUM)
	bytes = size_unknown (object_size_type);
      else
	bytes = wholesize;
    }
  else
    bytes = wholesize = size_unknown (object_size_type);

  if (!size_valid_p (bytes, object_size_type)
      || !size_valid_p (wholesize, object_size_type))
    bytes = wholesize = size_unknown (object_size_type);

  if (object_sizes_set (osi, varno, bytes, wholesize))
    osi->changed = true;
  return reexamine;
}

/* Compute the dynamic object size for VAR.  Return the result in SIZE and
   WHOLESIZE.  */

static void
dynamic_object_size (struct object_size_info *osi, tree var,
		     tree *size, tree *wholesize)
{
  int object_size_type = osi->object_size_type;

  if (TREE_CODE (var) == SSA_NAME)
    {
      unsigned varno = SSA_NAME_VERSION (var);

      collect_object_sizes_for (osi, var);
      *size = object_sizes_get (osi, varno);
      *wholesize = object_sizes_get (osi, varno, true);
    }
  else if (TREE_CODE (var) == ADDR_EXPR)
    addr_object_size (osi, var, object_size_type, size, wholesize);
  else
    *size = *wholesize = size_unknown (object_size_type);
}

/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (object_size_type & OST_DYNAMIC)
    {
      tree then_size, then_wholesize, else_size, else_wholesize;

      dynamic_object_size (osi, then_, &then_size, &then_wholesize);
      if (!size_unknown_p (then_size, object_size_type))
	dynamic_object_size (osi, else_, &else_size, &else_wholesize);

      tree cond_size, cond_wholesize;
      if (size_unknown_p (then_size, object_size_type)
	  || size_unknown_p (else_size, object_size_type))
	cond_size = cond_wholesize = size_unknown (object_size_type);
      else
	{
	  cond_size = fold_build3 (COND_EXPR, sizetype,
				   gimple_assign_rhs1 (stmt),
				   then_size, else_size);
	  cond_wholesize = fold_build3 (COND_EXPR, sizetype,
					gimple_assign_rhs1 (stmt),
					then_wholesize, else_wholesize);
	}

      object_sizes_set (osi, varno, cond_size, cond_wholesize);

      return false;
    }

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes_unknown_p (object_size_type, varno))
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Find size of an object passed as a parameter to the function.  */
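
/* For example, given the hypothetical declaration

     void f (size_t n, char *buf)
	  __attribute__ ((access (read_write, 2, 1)));

   the size of the object BUF points to is the SSA default definition of N,
   scaled by the size of the pointed-to type (1 for char).  */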
1574
1575static void
1576parm_object_size (struct object_size_info *osi, tree var)
1577{
1578 int object_size_type = osi->object_size_type;
1579 tree parm = SSA_NAME_VAR (var);
1580
1581 if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
1582 {
1583 expr_object_size (osi, ptr: var, value: parm);
1584 return;
1585 }
1586
1587 /* Look for access attribute. */
1588 rdwr_map rdwr_idx;
1589
1590 tree fndecl = cfun->decl;
1591 const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
1592 tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
1593 tree sz = NULL_TREE;
1594
1595 /* If we have an access attribute with a usable size argument... */
1596 if (access && access->sizarg != UINT_MAX
1597 /* ... and either PARM is void * or has a type that is complete and has a
1598 constant size... */
1599 && ((typesize && poly_int_tree_p (t: typesize))
1600 || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
1601 {
1602 tree fnargs = DECL_ARGUMENTS (fndecl);
1603 tree arg = NULL_TREE;
1604 unsigned argpos = 0;
1605
1606 /* ... then walk through the parameters to pick the size parameter and
1607 safely scale it by the type size if needed.
1608
1609 TODO: we could also compute the size of VLAs where the size is
1610 given by a function parameter. */
1611 for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
1612 if (argpos == access->sizarg)
1613 {
1614 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1615 sz = get_or_create_ssa_default_def (cfun, arg);
1616 if (sz != NULL_TREE)
1617 {
1618 sz = fold_convert (sizetype, sz);
1619 if (typesize)
1620 sz = size_binop (MULT_EXPR, sz, typesize);
1621 }
1622 break;
1623 }
1624 }
1625 if (!sz)
1626 sz = size_unknown (object_size_type);
1627
1628 object_sizes_set (osi, SSA_NAME_VERSION (var), val: sz, wholeval: sz);
1629}
1630
1631/* Compute an object size expression for VAR, which is the result of a PHI
1632 node. */
1633
1634static void
1635phi_dynamic_object_size (struct object_size_info *osi, tree var)
1636{
1637 int object_size_type = osi->object_size_type;
1638 unsigned int varno = SSA_NAME_VERSION (var);
1639 gimple *stmt = SSA_NAME_DEF_STMT (var);
1640 unsigned i, num_args = gimple_phi_num_args (gs: stmt);
1641 bool wholesize_needed = false;
1642
1643 /* The extra space is for the PHI result at the end, which object_sizes_set
1644 sets for us. */
1645 tree sizes = make_tree_vec (num_args + 1);
1646 tree wholesizes = make_tree_vec (num_args + 1);
1647
1648 /* Bail out if the size of any of the PHI arguments cannot be
1649 determined. */
1650 for (i = 0; i < num_args; i++)
1651 {
1652 edge e = gimple_phi_arg_edge (phi: as_a <gphi *> (p: stmt), i);
1653 if (e->flags & EDGE_COMPLEX)
1654 break;
1655
1656 tree rhs = gimple_phi_arg_def (gs: stmt, index: i);
1657 tree size, wholesize;
1658
1659 dynamic_object_size (osi, var: rhs, size: &size, wholesize: &wholesize);
1660
1661 if (size_unknown_p (val: size, object_size_type))
1662 break;
1663
1664 if (size != wholesize)
1665 wholesize_needed = true;
1666
1667 TREE_VEC_ELT (sizes, i) = size;
1668 TREE_VEC_ELT (wholesizes, i) = wholesize;
1669 }
1670
1671 if (i < num_args)
1672 {
1673 ggc_free (sizes);
1674 ggc_free (wholesizes);
1675 sizes = wholesizes = size_unknown (object_size_type);
1676 }
1677
1678 /* Point to the same TREE_VEC so that we can avoid emitting two PHI
1679 nodes. */
1680 else if (!wholesize_needed)
1681 {
1682 ggc_free (wholesizes);
1683 wholesizes = sizes;
1684 }
1685
1686 object_sizes_set (osi, varno, val: sizes, wholeval: wholesizes);
1687}
1688
1689/* Compute object sizes for VAR.
1690 For ADDR_EXPR an object size is the number of remaining bytes
1691 to the end of the object (where what is considered an object depends on
1692 OSI->object_size_type).
1693 For allocation GIMPLE_CALL like malloc or calloc object size is the size
1694 of the allocation.
1695 For POINTER_PLUS_EXPR where second operand is a constant integer,
1696 object size is object size of the first operand minus the constant.
1697 If the constant is bigger than the number of remaining bytes until the
1698 end of the object, object size is 0, but if it is instead a pointer
1699 subtraction, object size is size_unknown (object_size_type).
1700 To differentiate addition from subtraction, ADDR_EXPR returns
1701 size_unknown (object_size_type) for all objects bigger than half of the
1702 address space, and constants less than half of the address space are
1703 considered addition, while bigger constants subtraction.
1704 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
1705 object size is object size of that argument.
1706 Otherwise, object size is the maximum of object sizes of variables
1707 that it might be set to. */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          /* Initialize to 0 for maximum size and M1U for minimum size so that
             it gets immediately overridden.  */
          object_sizes_initialize (osi, varno,
                                   size_initval (object_size_type),
                                   size_initval (object_size_type));
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          if (object_size_type & OST_DYNAMIC)
            object_sizes_set_temp (osi, varno);

          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      unknown_object_size (osi, var);
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        parm_object_size (osi, var);
      else
        /* Uninitialized SSA names point nowhere.  */
        unknown_object_size (osi, var);
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        if (object_size_type & OST_DYNAMIC)
          {
            phi_dynamic_object_size (osi, var);
            break;
          }

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes_unknown_p (object_size_type, varno))
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  /* Dynamic sizes use placeholder temps to return an answer, so it is always
     safe to set COMPUTED for them.  */
  if ((object_size_type & OST_DYNAMIC)
      || !reexamine || object_sizes_unknown_p (object_size_type, varno))
    {
      bitmap_set_bit (computed[object_size_type], varno);
      if (!(object_size_type & OST_DYNAMIC))
        bitmap_clear_bit (osi->reexamine, varno);
      else if (reexamine)
        bitmap_set_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */
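
/* Editor's note: DEPTH grows only across a POINTER_PLUS_EXPR with a
   nonzero constant offset, so meeting VARNO again at a different depth
   means the use-def cycle back to it keeps adding to the pointer; every
   SSA name on the stack down to VARNO is then given size 0.  */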
1869
1870static void
1871check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1872 unsigned int depth)
1873{
1874 gimple *stmt = SSA_NAME_DEF_STMT (var);
1875 unsigned int varno = SSA_NAME_VERSION (var);
1876
1877 if (osi->depths[varno])
1878 {
1879 if (osi->depths[varno] != depth)
1880 {
1881 unsigned int *sp;
1882
1883 /* Found a loop involving pointer addition. */
1884 for (sp = osi->tos; sp > osi->stack; )
1885 {
1886 --sp;
1887 bitmap_clear_bit (osi->reexamine, *sp);
1888 bitmap_set_bit (computed[osi->object_size_type], *sp);
1889 object_sizes_set (osi, varno: *sp, size_zero_node,
1890 wholeval: object_sizes_get (osi, varno: *sp, whole: true));
1891 if (*sp == varno)
1892 break;
1893 }
1894 }
1895 return;
1896 }
1897 else if (! bitmap_bit_p (osi->reexamine, varno))
1898 return;
1899
1900 osi->depths[varno] = depth;
1901 *osi->tos++ = varno;
1902
1903 switch (gimple_code (g: stmt))
1904 {
1905
1906 case GIMPLE_ASSIGN:
1907 {
1908 if ((gimple_assign_single_p (gs: stmt)
1909 || gimple_assign_unary_nop_p (stmt))
1910 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1911 {
1912 tree rhs = gimple_assign_rhs1 (gs: stmt);
1913
1914 check_for_plus_in_loops_1 (osi, var: rhs, depth);
1915 }
1916 else if (gimple_assign_rhs_code (gs: stmt) == POINTER_PLUS_EXPR)
1917 {
1918 tree basevar = gimple_assign_rhs1 (gs: stmt);
1919 tree cst = gimple_assign_rhs2 (gs: stmt);
1920
1921 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1922
1923 check_for_plus_in_loops_1 (osi, var: basevar,
1924 depth: depth + !integer_zerop (cst));
1925 }
1926 else
1927 gcc_unreachable ();
1928 break;
1929 }
1930
1931 case GIMPLE_CALL:
1932 {
1933 gcall *call_stmt = as_a <gcall *> (p: stmt);
1934 tree arg = pass_through_call (call: call_stmt);
1935 if (arg)
1936 {
1937 if (TREE_CODE (arg) == SSA_NAME)
1938 check_for_plus_in_loops_1 (osi, var: arg, depth);
1939 else
1940 gcc_unreachable ();
1941 }
1942 break;
1943 }
1944
1945 case GIMPLE_PHI:
1946 {
1947 unsigned i;
1948
1949 for (i = 0; i < gimple_phi_num_args (gs: stmt); i++)
1950 {
1951 tree rhs = gimple_phi_arg (gs: stmt, index: i)->def;
1952
1953 if (TREE_CODE (rhs) == SSA_NAME)
1954 check_for_plus_in_loops_1 (osi, var: rhs, depth);
1955 }
1956 break;
1957 }
1958
1959 default:
1960 gcc_unreachable ();
1961 }
1962
1963 osi->depths[varno] = 0;
1964 osi->tos--;
1965}
1966
1967
1968/* Check if some pointer we are computing object size of is being increased
1969 within a loop. If yes, assume all the SSA variables participating in
1970 that loop have minimum object sizes 0. */
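
/* A minimal sketch of what this catches (editor's example, not from the
   testsuite):

     char *p = buf;
     while (cond)
       p = p + 2;

   Here p's PHI and the POINTER_PLUS_EXPR form a use-def cycle with a
   nonzero offset, so every SSA name in the cycle conservatively gets
   minimum object size 0.  */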

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      /* Skip non-positive offsets.  */
      if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
    {
      object_sizes_grow (object_size_type);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
    {
      object_sizes_release (object_size_type);
      BITMAP_FREE (computed[object_size_type]);
    }
}

/* Dummy valueize function, used so that gimple_fold_stmt_to_constant folds
   the statement using its operands as-is, without substituting SSA name
   values.  */

static tree
do_valueize (tree t)
{
  return t;
}

/* Process a __builtin_object_size or __builtin_dynamic_object_size call in
   CALL early for subobjects, before any object information is lost due to
   optimization.  Insert a MIN_EXPR or MAX_EXPR of the computed constant and
   the __builtin_object_size call result at I so that the call may be
   processed in the second pass.  __builtin_dynamic_object_size is treated
   like __builtin_object_size here since we're only looking for constant
   bounds.  */
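
/* Roughly (an editor's sketch of the transformation; SSA names are
   hypothetical):

     sz_1 = __builtin_object_size (x, 1);

   becomes

     tem_2 = __builtin_object_size (x, 1);
     sz_1 = MIN_EXPR <tem_2, 12>;

   where 12 stands for the constant subobject bound computed here; for
   OST_MINIMUM (type 3) a MAX_EXPR is used instead.  */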

static void
early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  tree ost = gimple_call_arg (call, 1);
  tree lhs = gimple_call_lhs (call);
  gcc_assert (lhs != NULL_TREE);

  if (!tree_fits_uhwi_p (ost))
    return;

  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
  tree ptr = gimple_call_arg (call, 0);

  if (object_size_type != 1 && object_size_type != 3)
    return;

  if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
    return;

  tree type = TREE_TYPE (lhs);
  tree bytes;
  if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
      || !int_fits_type_p (bytes, type))
    return;

  tree tem = make_ssa_name (type);
  gimple_call_set_lhs (call, tem);
  enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
  tree cst = fold_convert (type, bytes);
  gimple *g = gimple_build_assign (lhs, code, tem, cst);
  gsi_insert_after (i, g, GSI_NEW_STMT);
  update_stmt (call);
}

/* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
   expression and insert it at I.  Return true if it succeeds.  */
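
/* For instance (editor's illustration, not from the sources):

     p_1 = malloc (n_2);
     sz_3 = __builtin_dynamic_object_size (p_1, 0);

   may fold to sz_3 = n_2, an exact size expression where the static
   __builtin_object_size would typically have to return (size_t) -1.  */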

static bool
dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  gcc_assert (gimple_call_num_args (call) == 2);

  tree args[2];
  args[0] = gimple_call_arg (call, 0);
  args[1] = gimple_call_arg (call, 1);

  location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
  tree result_type = gimple_call_return_type (as_a <gcall *> (call));
  tree result = fold_builtin_call_array (loc, result_type,
                                         gimple_call_fn (call), 2, args);

  if (!result)
    return false;

  /* fold_builtin_call_array may wrap the result inside a
     NOP_EXPR.  */
  STRIP_NOPS (result);
  gimplify_and_update_call_from_tree (i, result);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified (dynamic)\n ");
      print_gimple_stmt (dump_file, call, 0, dump_flags);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, result);
      fprintf (dump_file, "\n");
    }
  return true;
}

static unsigned int
object_sizes_execute (function *fun, bool early)
{
  todo = 0;

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          bool dynamic = false;

          gimple *call = gsi_stmt (i);
          if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
            dynamic = true;
          else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          init_object_sizes ();

          /* If early, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  Do the same for
             __builtin_dynamic_object_size too.  */
          if (early)
            {
              early_object_sizes_execute_one (&i, call);
              continue;
            }

          if (dynamic)
            {
              if (dynamic_object_sizes_execute_one (&i, call))
                continue;
              else
                {
                  /* If we could not find a suitable size expression, lower to
                     __builtin_object_size so that we may at least get a
                     constant lower or higher estimate.  */
                  tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
                  gimple_call_set_fndecl (call, bosfn);
                  update_stmt (call);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      print_generic_expr (dump_file, gimple_call_arg (call, 0),
                                          dump_flags);
                      fprintf (dump_file,
                               ": Retrying as __builtin_object_size\n");
                    }
                }
            }

          result = gimple_fold_stmt_to_constant (call, do_valueize);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  if (object_size_type & OST_MINIMUM)
                    result = build_zero_cst (size_type_node);
                  else if (object_size_type < OST_END)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result);
              fprintf (dump_file, "\n");
            }

          /* Propagate into all uses and fold those stmts.  */
          if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            replace_uses_by (lhs, result);
          else
            replace_call_with_value (&i, result);
        }
    }

  fini_object_sizes ();
  return todo;
}
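
/* For reference (editor's note): when no size can be computed above,
   __builtin_object_size (x, 0) and (x, 1) fold to (size_t) -1 while
   (x, 2) and (x, 3) fold to 0, matching the builtin's documented
   failure values.  */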

/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}

/* Early version of pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_early_object_sizes : public gimple_opt_pass
{
public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_early_object_sizes (gcc::context *ctxt)
{
  return new pass_early_object_sizes (ctxt);
}