1 | /* Definitions of the pointer_query and related classes. |
2 | |
3 | Copyright (C) 2020-2024 Free Software Foundation, Inc. |
4 | |
5 | This file is part of GCC. |
6 | |
7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free |
9 | Software Foundation; either version 3, or (at your option) any later |
10 | version. |
11 | |
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
15 | for more details. |
16 | |
17 | You should have received a copy of the GNU General Public License |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ |
20 | |
21 | #include "config.h" |
22 | #include "system.h" |
23 | #include "coretypes.h" |
24 | #include "backend.h" |
25 | #include "tree.h" |
26 | #include "gimple.h" |
27 | #include "stringpool.h" |
28 | #include "tree-vrp.h" |
29 | #include "diagnostic-core.h" |
30 | #include "fold-const.h" |
31 | #include "tree-object-size.h" |
32 | #include "tree-ssa-strlen.h" |
33 | #include "langhooks.h" |
34 | #include "stringpool.h" |
35 | #include "attribs.h" |
36 | #include "gimple-iterator.h" |
37 | #include "gimple-fold.h" |
38 | #include "gimple-ssa.h" |
39 | #include "intl.h" |
40 | #include "attr-fnspec.h" |
41 | #include "gimple-range.h" |
42 | #include "pointer-query.h" |
43 | #include "tree-pretty-print.h" |
44 | #include "tree-ssanames.h" |
45 | #include "target.h" |
46 | |
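/* Forward declaration of the recursive worker behind compute_objsize;
   being static, it is defined later in this file.  */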
47 | static bool compute_objsize_r (tree, gimple *, bool, int, access_ref *, |
48 | ssa_name_limit_t &, pointer_query *); |
49 | |
50 | /* Wrapper around the wide_int overload of get_range that accepts |
51 | offset_int instead. For middle end expressions returns the same |
52 | result. For a subset of nonconstant expressions emitted by the front |
53 | end determines a more precise range than would be possible otherwise. */ |
54 | |
55 | static bool |
56 | get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals) |
57 | { |
58 | offset_int add = 0; |
59 | if (TREE_CODE (x) == PLUS_EXPR) |
60 | { |
61 | /* Handle constant offsets in pointer addition expressions seen |
62 | in the front end IL. */ |
63 | tree op = TREE_OPERAND (x, 1); |
64 | if (TREE_CODE (op) == INTEGER_CST) |
65 | { |
66 | op = fold_convert (signed_type_for (TREE_TYPE (op)), op); |
67 | add = wi::to_offset (t: op); |
68 | x = TREE_OPERAND (x, 0); |
69 | } |
70 | } |
71 | |
72 | if (TREE_CODE (x) == NOP_EXPR) |
73 | /* Also handle conversions to sizetype seen in the front end IL. */ |
74 | x = TREE_OPERAND (x, 0); |
75 | |
76 | tree type = TREE_TYPE (x); |
77 | if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) |
78 | return false; |
79 | |
80 | if (TREE_CODE (x) != INTEGER_CST |
81 | && TREE_CODE (x) != SSA_NAME) |
82 | { |
83 | if (TYPE_UNSIGNED (type) |
84 | && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype)) |
85 | type = signed_type_for (type); |
86 | |
87 | r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add; |
88 | r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add; |
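/* X is necessarily nonnull here (its TREE_CODE was just examined),
   so this is equivalent to returning true.  */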
89 | return x; |
90 | } |
91 | |
92 | wide_int wr[2]; |
93 | if (!get_range (x, stmt, wr, rvals)) |
94 | return false; |
95 | |
96 | signop sgn = SIGNED; |
97 | /* Only convert signed integers or unsigned sizetype to a signed |
98 | offset and avoid converting large positive values in narrower |
99 | types to negative offsets. */ |
100 | if (TYPE_UNSIGNED (type) |
101 | && wr[0].get_precision () < TYPE_PRECISION (sizetype)) |
102 | sgn = UNSIGNED; |
103 | |
104 | r[0] = offset_int::from (x: wr[0], sgn); |
105 | r[1] = offset_int::from (x: wr[1], sgn); |
106 | return true; |
107 | } |
108 | |
109 | /* Return the argument that the call STMT to a built-in function returns |
110 | or null if it doesn't. On success, set OFFRNG[] to the range of offsets |
111 | from the argument reflected in the value returned by the built-in if it |
112 | can be determined, otherwise to 0 and HWI_M1U respectively. Set |
113 | *PAST_END for functions like mempcpy that might return a past-the-end |
114 | pointer (most functions return a dereferenceable pointer to an existing |
115 | element of an array). */ |
116 | |
117 | static tree |
118 | gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end, |
119 | ssa_name_limit_t &snlim, pointer_query *qry) |
120 | { |
121 | /* Clear and set below for the rare function(s) that might return |
122 | a past-the-end pointer. */ |
123 | *past_end = false; |
124 | |
125 | { |
126 | /* Check for attribute fn spec to see if the function returns one |
127 | of its arguments. */ |
128 | attr_fnspec fnspec = gimple_call_fnspec (stmt: as_a <gcall *>(p: stmt)); |
129 | unsigned int argno; |
130 | if (fnspec.returns_arg (arg_no: &argno)) |
131 | { |
132 | /* The function returns argument ARGNO with a zero offset (not a range). */ |
133 | offrng[0] = offrng[1] = 0; |
134 | return gimple_call_arg (gs: stmt, index: argno); |
135 | } |
136 | } |
137 | |
138 | if (gimple_call_num_args (gs: stmt) < 1) |
139 | return NULL_TREE; |
140 | |
141 | tree fn = gimple_call_fndecl (gs: stmt); |
142 | if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) |
143 | { |
144 | /* See if this is a call to placement new. */ |
145 | if (!fn |
146 | || !DECL_IS_OPERATOR_NEW_P (fn) |
147 | || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn)) |
148 | return NULL_TREE; |
149 | |
150 | /* Check the mangling, keeping in mind that operator new takes |
151 | a size_t which could be unsigned int or unsigned long. */ |
152 | tree fname = DECL_ASSEMBLER_NAME (fn); |
153 | if (!id_equal (id: fname, str: "_ZnwjPv" ) // ordinary form |
154 | && !id_equal (id: fname, str: "_ZnwmPv" ) // ordinary form |
155 | && !id_equal (id: fname, str: "_ZnajPv" ) // array form |
156 | && !id_equal (id: fname, str: "_ZnamPv" )) // array form |
157 | return NULL_TREE; |
158 | |
159 | if (gimple_call_num_args (gs: stmt) != 2) |
160 | return NULL_TREE; |
161 | |
162 | /* Allocation functions return a pointer to the beginning. */ |
163 | offrng[0] = offrng[1] = 0; |
164 | return gimple_call_arg (gs: stmt, index: 1); |
165 | } |
166 | |
167 | switch (DECL_FUNCTION_CODE (decl: fn)) |
168 | { |
169 | case BUILT_IN_MEMCPY: |
170 | case BUILT_IN_MEMCPY_CHK: |
171 | case BUILT_IN_MEMMOVE: |
172 | case BUILT_IN_MEMMOVE_CHK: |
173 | case BUILT_IN_MEMSET: |
174 | case BUILT_IN_STRCAT: |
175 | case BUILT_IN_STRCAT_CHK: |
176 | case BUILT_IN_STRCPY: |
177 | case BUILT_IN_STRCPY_CHK: |
178 | case BUILT_IN_STRNCAT: |
179 | case BUILT_IN_STRNCAT_CHK: |
180 | case BUILT_IN_STRNCPY: |
181 | case BUILT_IN_STRNCPY_CHK: |
182 | /* Functions return the first argument (not a range). */ |
183 | offrng[0] = offrng[1] = 0; |
184 | return gimple_call_arg (gs: stmt, index: 0); |
185 | |
186 | case BUILT_IN_MEMPCPY: |
187 | case BUILT_IN_MEMPCPY_CHK: |
188 | { |
189 | /* The returned pointer is in a range constrained by the smaller |
190 | of the upper bound of the size argument and the source object |
191 | size. */ |
192 | offrng[0] = 0; |
193 | offrng[1] = HOST_WIDE_INT_M1U; |
194 | tree off = gimple_call_arg (gs: stmt, index: 2); |
195 | bool off_valid = get_offset_range (x: off, stmt, r: offrng, rvals: qry->rvals); |
196 | if (!off_valid || offrng[0] != offrng[1]) |
197 | { |
198 | /* If the offset is either indeterminate or in some range, |
199 | try to constrain its upper bound to at most the size |
200 | of the source object. */ |
201 | access_ref aref; |
202 | tree src = gimple_call_arg (gs: stmt, index: 1); |
203 | if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry) |
204 | && aref.sizrng[1] < offrng[1]) |
205 | offrng[1] = aref.sizrng[1]; |
206 | } |
207 | |
208 | /* Mempcpy may return a past-the-end pointer. */ |
209 | *past_end = true; |
210 | return gimple_call_arg (gs: stmt, index: 0); |
211 | } |
212 | |
213 | case BUILT_IN_MEMCHR: |
214 | { |
215 | tree off = gimple_call_arg (gs: stmt, index: 2); |
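/* memchr returns either null or a pointer to one of the first N bytes
   of its first argument, so when the bound N is known the largest offset
   into the result is N - 1; otherwise the offset is unknown.  */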
216 | if (get_offset_range (x: off, stmt, r: offrng, rvals: qry->rvals)) |
217 | offrng[1] -= 1; |
218 | else |
219 | offrng[1] = HOST_WIDE_INT_M1U; |
220 | |
221 | offrng[0] = 0; |
222 | return gimple_call_arg (gs: stmt, index: 0); |
223 | } |
224 | |
225 | case BUILT_IN_STRCHR: |
226 | case BUILT_IN_STRRCHR: |
227 | case BUILT_IN_STRSTR: |
228 | offrng[0] = 0; |
229 | offrng[1] = HOST_WIDE_INT_M1U; |
230 | return gimple_call_arg (gs: stmt, index: 0); |
231 | |
232 | case BUILT_IN_STPCPY: |
233 | case BUILT_IN_STPCPY_CHK: |
234 | { |
235 | access_ref aref; |
236 | tree src = gimple_call_arg (gs: stmt, index: 1); |
237 | if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)) |
238 | offrng[1] = aref.sizrng[1] - 1; |
239 | else |
240 | offrng[1] = HOST_WIDE_INT_M1U; |
241 | |
242 | offrng[0] = 0; |
243 | return gimple_call_arg (gs: stmt, index: 0); |
244 | } |
245 | |
246 | case BUILT_IN_STPNCPY: |
247 | case BUILT_IN_STPNCPY_CHK: |
248 | { |
249 | /* The returned pointer is in a range between the first argument |
250 | and it plus the smaller of the upper bound of the size argument |
251 | and the source object size. */ |
252 | offrng[1] = HOST_WIDE_INT_M1U; |
253 | tree off = gimple_call_arg (gs: stmt, index: 2); |
254 | if (!get_offset_range (x: off, stmt, r: offrng, rvals: qry->rvals) |
255 | || offrng[0] != offrng[1]) |
256 | { |
257 | /* If the offset is either indeterminate or in some range, |
258 | try to constrain its upper bound to at most the size |
259 | of the source object. */ |
260 | access_ref aref; |
261 | tree src = gimple_call_arg (gs: stmt, index: 1); |
262 | if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry) |
263 | && aref.sizrng[1] < offrng[1]) |
264 | offrng[1] = aref.sizrng[1]; |
265 | } |
266 | |
267 | /* When the source is the empty string the returned pointer is |
268 | a copy of the argument. Otherwise stpncpy can also return |
269 | a past-the-end pointer. */ |
270 | offrng[0] = 0; |
271 | *past_end = true; |
272 | return gimple_call_arg (gs: stmt, index: 0); |
273 | } |
274 | |
275 | default: |
276 | break; |
277 | } |
278 | |
279 | return NULL_TREE; |
280 | } |
281 | |
282 | /* Return true when EXP's range can be determined and set RANGE[] to it |
283 | after adjusting it if necessary to make EXP represent a valid size |
284 | of an object, or a valid size argument to an allocation function declared |
285 | with attribute alloc_size (whose argument may be signed), or to a string |
286 | manipulation function like memset. |
287 | When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for |
288 | a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is |
289 | a (nearly) invalid argument to allocation functions like malloc but it |
290 | is a valid argument to functions like memset. |
291 | When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange |
292 | in a multi-range, otherwise to the smallest valid subrange. */ |
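/* For example, for an unsigned EXP in the anti-range ~[1, 7] (i.e., either
   zero or at least 8), RANGE is set to [0, 0] when ALLOW_ZERO is set (and
   USE_LARGEST is not), and to [8, TYPE_MAX] otherwise.  */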
293 | |
294 | bool |
295 | get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2], |
296 | int flags /* = 0 */) |
297 | { |
298 | if (!exp) |
299 | return false; |
300 | |
301 | if (tree_fits_uhwi_p (exp)) |
302 | { |
303 | /* EXP is a constant. */ |
304 | range[0] = range[1] = exp; |
305 | return true; |
306 | } |
307 | |
308 | tree exptype = TREE_TYPE (exp); |
309 | bool integral = INTEGRAL_TYPE_P (exptype); |
310 | |
311 | wide_int min, max; |
312 | enum value_range_kind range_type; |
313 | |
314 | if (!query) |
315 | query = get_range_query (cfun); |
316 | |
317 | if (integral) |
318 | { |
319 | value_range vr; |
320 | tree tmin, tmax; |
321 | |
322 | query->range_of_expr (r&: vr, expr: exp, stmt); |
323 | |
324 | if (vr.undefined_p ()) |
325 | vr.set_varying (TREE_TYPE (exp)); |
326 | range_type = get_legacy_range (vr, min&: tmin, max&: tmax); |
327 | min = wi::to_wide (t: tmin); |
328 | max = wi::to_wide (t: tmax); |
329 | } |
330 | else |
331 | range_type = VR_VARYING; |
332 | |
333 | if (range_type == VR_VARYING) |
334 | { |
335 | if (integral) |
336 | { |
337 | /* Use the full range of the type of the expression when |
338 | no value range information is available. */ |
339 | range[0] = TYPE_MIN_VALUE (exptype); |
340 | range[1] = TYPE_MAX_VALUE (exptype); |
341 | return true; |
342 | } |
343 | |
344 | range[0] = NULL_TREE; |
345 | range[1] = NULL_TREE; |
346 | return false; |
347 | } |
348 | |
349 | unsigned expprec = TYPE_PRECISION (exptype); |
350 | |
351 | bool signed_p = !TYPE_UNSIGNED (exptype); |
352 | |
353 | if (range_type == VR_ANTI_RANGE) |
354 | { |
355 | if (signed_p) |
356 | { |
357 | if (wi::les_p (x: max, y: 0)) |
358 | { |
359 | /* EXP is not in a strictly negative range. That means |
360 | it must be in some (not necessarily strictly) positive |
361 | range which includes zero. Since in signed to unsigned |
362 | conversions negative values end up converted to large |
363 | positive values, and otherwise they are not valid sizes, |
364 | the resulting range is in both cases [0, TYPE_MAX]. */ |
365 | min = wi::zero (precision: expprec); |
366 | max = wi::to_wide (TYPE_MAX_VALUE (exptype)); |
367 | } |
368 | else if (wi::les_p (x: min - 1, y: 0)) |
369 | { |
370 | /* EXP is not in a negative-positive range. That means EXP |
371 | is either negative, or greater than max. Since negative |
372 | sizes are invalid make the range [MAX + 1, TYPE_MAX]. */ |
373 | min = max + 1; |
374 | max = wi::to_wide (TYPE_MAX_VALUE (exptype)); |
375 | } |
376 | else |
377 | { |
378 | max = min - 1; |
379 | min = wi::zero (precision: expprec); |
380 | } |
381 | } |
382 | else |
383 | { |
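/* Widen MIN and MAX to the precision of the maximum object size,
   treating them as unsigned, before adjusting them below.  */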
384 | wide_int maxsize = wi::to_wide (t: max_object_size ()); |
385 | min = wide_int::from (x: min, precision: maxsize.get_precision (), sgn: UNSIGNED); |
386 | max = wide_int::from (x: max, precision: maxsize.get_precision (), sgn: UNSIGNED); |
387 | if (wi::eq_p (x: 0, y: min - 1)) |
388 | { |
389 | /* EXP is unsigned and not in the range [1, MAX]. That means |
390 | it's either zero or greater than MAX. Even though 0 would |
391 | normally be detected by -Walloc-zero, unless ALLOW_ZERO |
392 | is set, set the range to [MAX, TYPE_MAX] so that when MAX |
393 | is greater than the limit the whole range is diagnosed. */ |
394 | wide_int maxsize = wi::to_wide (t: max_object_size ()); |
395 | if (flags & SR_ALLOW_ZERO) |
396 | { |
397 | if (wi::leu_p (x: maxsize, y: max + 1) |
398 | || !(flags & SR_USE_LARGEST)) |
399 | min = max = wi::zero (precision: expprec); |
400 | else |
401 | { |
402 | min = max + 1; |
403 | max = wi::to_wide (TYPE_MAX_VALUE (exptype)); |
404 | } |
405 | } |
406 | else |
407 | { |
408 | min = max + 1; |
409 | max = wi::to_wide (TYPE_MAX_VALUE (exptype)); |
410 | } |
411 | } |
412 | else if ((flags & SR_USE_LARGEST) |
413 | && wi::ltu_p (x: max + 1, y: maxsize)) |
414 | { |
415 | /* When USE_LARGEST is set and the larger of the two subranges |
416 | is a valid size, use it... */ |
417 | min = max + 1; |
418 | max = maxsize; |
419 | } |
420 | else |
421 | { |
422 | /* ...otherwise use the smaller subrange. */ |
423 | max = min - 1; |
424 | min = wi::zero (precision: expprec); |
425 | } |
426 | } |
427 | } |
428 | |
429 | range[0] = wide_int_to_tree (type: exptype, cst: min); |
430 | range[1] = wide_int_to_tree (type: exptype, cst: max); |
431 | |
432 | return true; |
433 | } |
434 | |
435 | bool |
436 | get_size_range (tree exp, tree range[2], int flags /* = 0 */) |
437 | { |
438 | return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags); |
439 | } |
440 | |
441 | /* If STMT is a call to an allocation function, returns the constant |
442 | maximum size of the object allocated by the call represented as |
443 | sizetype. If nonnull, sets RNG1[] to the range of the size. |
444 | When nonnull, uses RVALS for range information, otherwise gets global |
445 | range info. |
446 | Returns null when STMT is not a call to a valid allocation function. */ |
447 | |
448 | tree |
449 | gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */, |
450 | range_query *qry /* = NULL */) |
451 | { |
452 | if (!stmt || !is_gimple_call (gs: stmt)) |
453 | return NULL_TREE; |
454 | |
455 | tree allocfntype; |
456 | if (tree fndecl = gimple_call_fndecl (gs: stmt)) |
457 | allocfntype = TREE_TYPE (fndecl); |
458 | else |
459 | allocfntype = gimple_call_fntype (gs: stmt); |
460 | |
461 | if (!allocfntype) |
462 | return NULL_TREE; |
463 | |
464 | unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX; |
465 | tree at = lookup_attribute (attr_name: "alloc_size" , TYPE_ATTRIBUTES (allocfntype)); |
466 | if (!at) |
467 | { |
468 | if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)) |
469 | return NULL_TREE; |
470 | |
471 | argidx1 = 0; |
472 | } |
473 | |
474 | unsigned nargs = gimple_call_num_args (gs: stmt); |
475 | |
476 | if (argidx1 == UINT_MAX) |
477 | { |
478 | tree atval = TREE_VALUE (at); |
479 | if (!atval) |
480 | return NULL_TREE; |
481 | |
482 | argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1; |
483 | if (nargs <= argidx1) |
484 | return NULL_TREE; |
485 | |
486 | atval = TREE_CHAIN (atval); |
487 | if (atval) |
488 | { |
489 | argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1; |
490 | if (nargs <= argidx2) |
491 | return NULL_TREE; |
492 | } |
493 | } |
494 | |
495 | tree size = gimple_call_arg (gs: stmt, index: argidx1); |
496 | |
497 | wide_int rng1_buf[2]; |
498 | /* If RNG1 is not set, use the buffer. */ |
499 | if (!rng1) |
500 | rng1 = rng1_buf; |
501 | |
502 | /* Use maximum precision to avoid overflow below. */ |
503 | const int prec = ADDR_MAX_PRECISION; |
504 | |
505 | { |
506 | tree r[2]; |
507 | /* Determine the largest valid range size, including zero. */ |
508 | if (!get_size_range (query: qry, exp: size, stmt, range: r, flags: SR_ALLOW_ZERO | SR_USE_LARGEST)) |
509 | return NULL_TREE; |
510 | rng1[0] = wi::to_wide (t: r[0], prec); |
511 | rng1[1] = wi::to_wide (t: r[1], prec); |
512 | } |
513 | |
514 | if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST) |
515 | return fold_convert (sizetype, size); |
516 | |
517 | /* To handle ranges do the math in wide_int and return the product |
518 | of the upper bounds as a constant. Ignore anti-ranges. */ |
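/* With only one attribute alloc_size argument the second factor below
   defaults to one, so the product is just the size itself.  */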
519 | tree n = argidx2 < nargs ? gimple_call_arg (gs: stmt, index: argidx2) : integer_one_node; |
520 | wide_int rng2[2]; |
521 | { |
522 | tree r[2]; |
523 | /* As above, use the full non-negative range on failure. */ |
524 | if (!get_size_range (query: qry, exp: n, stmt, range: r, flags: SR_ALLOW_ZERO | SR_USE_LARGEST)) |
525 | return NULL_TREE; |
526 | rng2[0] = wi::to_wide (t: r[0], prec); |
527 | rng2[1] = wi::to_wide (t: r[1], prec); |
528 | } |
529 | |
530 | /* Compute products of both bounds for the caller but return the lesser |
531 | of SIZE_MAX and the product of the upper bounds as a constant. */ |
532 | rng1[0] = rng1[0] * rng2[0]; |
533 | rng1[1] = rng1[1] * rng2[1]; |
534 | |
535 | const tree size_max = TYPE_MAX_VALUE (sizetype); |
536 | if (wi::gtu_p (x: rng1[1], y: wi::to_wide (t: size_max, prec))) |
537 | { |
538 | rng1[1] = wi::to_wide (t: size_max, prec); |
539 | return size_max; |
540 | } |
541 | |
542 | return wide_int_to_tree (sizetype, cst: rng1[1]); |
543 | } |
544 | |
545 | /* For an access to an object referenced by the function parameter PTR |
546 | of pointer type, set RNG[] to the range of sizes of the object |
547 | obtained from the attribute access specification for the current function. |
548 | Set STATIC_ARRAY if the array parameter has been declared [static]. |
549 | Return the function parameter on success and null otherwise. */ |
550 | |
551 | static tree |
552 | gimple_parm_array_size (tree ptr, wide_int rng[2], |
553 | bool *static_array /* = NULL */) |
554 | { |
555 | /* For a function argument try to determine the byte size of the array |
556 | from the current function declaration (e.g., attribute access or |
557 | related). */ |
558 | tree var = SSA_NAME_VAR (ptr); |
559 | if (TREE_CODE (var) != PARM_DECL || !POINTER_TYPE_P (TREE_TYPE (var))) |
560 | return NULL_TREE; |
561 | |
562 | const unsigned prec = TYPE_PRECISION (sizetype); |
563 | |
564 | rdwr_map rdwr_idx; |
565 | attr_access *access = get_parm_access (rdwr_idx, var); |
566 | if (!access) |
567 | return NULL_TREE; |
568 | |
569 | if (access->sizarg != UINT_MAX) |
570 | { |
571 | /* TODO: Try to extract the range from the argument based on |
572 | those of subsequent assertions or based on known calls to |
573 | the current function. */ |
574 | return NULL_TREE; |
575 | } |
576 | |
577 | if (!access->minsize) |
578 | return NULL_TREE; |
579 | |
580 | /* Only consider ordinary array bound at level 2 (or above if it's |
581 | ever added). */ |
582 | if (warn_array_parameter < 2 && !access->static_p) |
583 | return NULL_TREE; |
584 | |
585 | if (static_array) |
586 | *static_array = access->static_p; |
587 | |
588 | rng[0] = wi::zero (precision: prec); |
589 | rng[1] = wi::uhwi (val: access->minsize, precision: prec); |
590 | /* Multiply the array bound encoded in the attribute by the size |
591 | of the element type that the pointer parameter points to. */ |
592 | tree eltype = TREE_TYPE (TREE_TYPE (ptr)); |
593 | tree size = TYPE_SIZE_UNIT (eltype); |
594 | if (!size || TREE_CODE (size) != INTEGER_CST) |
595 | return NULL_TREE; |
596 | |
597 | rng[1] *= wi::to_wide (t: size, prec); |
598 | return var; |
599 | } |
600 | |
601 | /* Initialize the object. */ |
602 | |
603 | access_ref::access_ref () |
604 | : ref (), eval ([](tree x){ return x; }), deref (), ref_nullptr_p (false), |
605 | trail1special (true), base0 (true), parmarray () |
606 | { |
607 | /* Set to valid. */ |
608 | offrng[0] = offrng[1] = 0; |
609 | offmax[0] = offmax[1] = 0; |
610 | /* Invalidate. */ |
611 | sizrng[0] = sizrng[1] = -1; |
612 | } |
613 | |
614 | /* Return the PHI node REF refers to or null if it doesn't. */ |
615 | |
616 | gphi * |
617 | access_ref::phi () const |
618 | { |
619 | if (!ref || TREE_CODE (ref) != SSA_NAME) |
620 | return NULL; |
621 | |
622 | gimple *def_stmt = SSA_NAME_DEF_STMT (ref); |
623 | if (!def_stmt || gimple_code (g: def_stmt) != GIMPLE_PHI) |
624 | return NULL; |
625 | |
626 | return as_a <gphi *> (p: def_stmt); |
627 | } |
628 | |
629 | /* Determine the size and offset for ARG, append it to ALL_REFS, and |
630 | merge the result with *THIS. Ignore ARG if SKIP_NULL is set and |
631 | ARG refers to the null pointer. On failure, set *THIS to the most |
632 | permissive (least constrained) state. */ |
633 | |
634 | void |
635 | access_ref::merge_ref (vec<access_ref> *all_refs, tree arg, gimple *stmt, |
636 | int ostype, bool skip_null, |
637 | ssa_name_limit_t &snlim, pointer_query &qry) |
638 | { |
639 | access_ref aref; |
640 | if (!compute_objsize_r (arg, stmt, false, ostype, &aref, snlim, &qry) |
641 | || aref.sizrng[0] < 0) |
642 | { |
643 | /* This may be a PHI with all null pointer arguments. Handle it |
644 | conservatively by setting all properties to the most permissive |
645 | values. */ |
646 | base0 = false; |
647 | offrng[0] = offrng[1] = 0; |
648 | add_max_offset (); |
649 | set_max_size_range (); |
650 | return; |
651 | } |
652 | |
653 | if (all_refs) |
654 | { |
655 | access_ref dummy_ref; |
656 | aref.get_ref (all_refs, &dummy_ref, ostype, &snlim, &qry); |
657 | } |
658 | |
659 | if (TREE_CODE (arg) == SSA_NAME) |
660 | qry.put_ref (arg, aref, ostype); |
661 | |
662 | if (all_refs) |
663 | all_refs->safe_push (obj: aref); |
664 | |
665 | aref.deref += deref; |
666 | |
667 | bool merged_parmarray = aref.parmarray; |
668 | |
669 | const bool nullp = skip_null && integer_zerop (arg); |
670 | const offset_int maxobjsize = wi::to_offset (t: max_object_size ()); |
671 | offset_int minsize = sizrng[0]; |
672 | |
673 | if (sizrng[0] < 0) |
674 | { |
675 | /* If *THIS doesn't contain a meaningful result yet set it to AREF |
676 | unless the argument is null and it's okay to ignore it. */ |
677 | if (!nullp) |
678 | *this = aref; |
679 | |
680 | /* Set if the current argument refers to one or more objects of |
681 | known size (or range of sizes), as opposed to referring to |
682 | one or more unknown object(s). */ |
683 | const bool arg_known_size = (aref.sizrng[0] != 0 |
684 | || aref.sizrng[1] != maxobjsize); |
685 | if (arg_known_size) |
686 | sizrng[0] = aref.sizrng[0]; |
687 | |
688 | return; |
689 | } |
690 | |
691 | /* Disregard null pointers in PHIs with two or more arguments. |
692 | TODO: Handle this better! */ |
693 | if (nullp) |
694 | return; |
695 | |
696 | const bool known_size = (sizrng[0] != 0 || sizrng[1] != maxobjsize); |
697 | |
698 | if (known_size && aref.sizrng[0] < minsize) |
699 | minsize = aref.sizrng[0]; |
700 | |
701 | /* Extend the size and offset of *THIS to account for AREF. The result |
702 | can be cached but results in false negatives. */ |
703 | |
704 | offset_int orng[2]; |
705 | if (sizrng[1] < aref.sizrng[1]) |
706 | { |
707 | orng[0] = offrng[0]; |
708 | orng[1] = offrng[1]; |
709 | *this = aref; |
710 | } |
711 | else |
712 | { |
713 | orng[0] = aref.offrng[0]; |
714 | orng[1] = aref.offrng[1]; |
715 | } |
716 | |
717 | if (orng[0] < offrng[0]) |
718 | offrng[0] = orng[0]; |
719 | if (offrng[1] < orng[1]) |
720 | offrng[1] = orng[1]; |
721 | |
722 | /* Reset the PHI's BASE0 flag if any of the nonnull arguments |
723 | refers to an object at an unknown offset. */ |
724 | if (!aref.base0) |
725 | base0 = false; |
726 | |
727 | sizrng[0] = minsize; |
728 | parmarray = merged_parmarray; |
729 | |
730 | return; |
731 | } |
732 | |
733 | /* Determine and return the largest object to which *THIS refers. If |
734 | *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details |
735 | of the object determined by compute_objsize(ARG, OSTYPE) for each PHI |
736 | argument ARG. */ |
737 | |
738 | tree |
739 | access_ref::get_ref (vec<access_ref> *all_refs, |
740 | access_ref *pref /* = NULL */, |
741 | int ostype /* = 1 */, |
742 | ssa_name_limit_t *psnlim /* = NULL */, |
743 | pointer_query *qry /* = NULL */) const |
744 | { |
745 | if (!ref || TREE_CODE (ref) != SSA_NAME) |
746 | return NULL; |
747 | |
748 | /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might |
749 | cause unbounded recursion. */ |
750 | ssa_name_limit_t snlim_buf; |
751 | if (!psnlim) |
752 | psnlim = &snlim_buf; |
753 | |
754 | pointer_query empty_qry; |
755 | if (!qry) |
756 | qry = &empty_qry; |
757 | |
758 | if (gimple *def_stmt = SSA_NAME_DEF_STMT (ref)) |
759 | { |
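/* Only MIN_EXPR and MAX_EXPR assignments are handled here: merge
   the objects referenced by both operands.  Other assignments fail;
   PHIs are handled further below.  */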
760 | if (is_gimple_assign (gs: def_stmt)) |
761 | { |
762 | tree_code code = gimple_assign_rhs_code (gs: def_stmt); |
763 | if (code != MIN_EXPR && code != MAX_EXPR) |
764 | return NULL_TREE; |
765 | |
766 | access_ref aref; |
767 | tree arg1 = gimple_assign_rhs1 (gs: def_stmt); |
768 | aref.merge_ref (all_refs, arg: arg1, stmt: def_stmt, ostype, skip_null: false, |
769 | snlim&: *psnlim, qry&: *qry); |
770 | |
771 | tree arg2 = gimple_assign_rhs2 (gs: def_stmt); |
772 | aref.merge_ref (all_refs, arg: arg2, stmt: def_stmt, ostype, skip_null: false, |
773 | snlim&: *psnlim, qry&: *qry); |
774 | |
775 | if (pref && pref != this) |
776 | { |
777 | tree ref = pref->ref; |
778 | *pref = aref; |
779 | pref->ref = ref; |
780 | } |
781 | |
782 | return aref.ref; |
783 | } |
784 | } |
785 | else |
786 | return NULL_TREE; |
787 | |
788 | gphi *phi_stmt = this->phi (); |
789 | if (!phi_stmt) |
790 | return ref; |
791 | |
792 | if (!psnlim->visit_phi (ref)) |
793 | return NULL_TREE; |
794 | |
795 | /* The conservative result of the PHI reflecting the offset and size |
796 | of the largest PHI argument, regardless of whether or not they all |
797 | refer to the same object. */ |
798 | access_ref phi_ref; |
799 | if (pref) |
800 | { |
801 | /* The identity of the object has not been determined yet but |
802 | PREF->REF is set by the caller to the PHI for convenience. |
803 | The size is negative/invalid and the offset is zero (it's |
804 | updated only after the identity of the object has been |
805 | established). */ |
806 | gcc_assert (pref->sizrng[0] < 0); |
807 | gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0); |
808 | |
809 | phi_ref = *pref; |
810 | } |
811 | |
812 | const offset_int maxobjsize = wi::to_offset (t: max_object_size ()); |
813 | const unsigned nargs = gimple_phi_num_args (gs: phi_stmt); |
814 | for (unsigned i = 0; i < nargs; ++i) |
815 | { |
816 | access_ref phi_arg_ref; |
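/* Ignore null arguments except in a degenerate PHI with a single
   argument, whose sole argument must be used.  */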
817 | bool skip_null = i || i + 1 < nargs; |
818 | tree arg = gimple_phi_arg_def (gs: phi_stmt, index: i); |
819 | phi_ref.merge_ref (all_refs, arg, stmt: phi_stmt, ostype, skip_null, |
820 | snlim&: *psnlim, qry&: *qry); |
821 | |
822 | if (!phi_ref.base0 |
823 | && phi_ref.sizrng[0] == 0 |
824 | && phi_ref.sizrng[1] >= maxobjsize) |
825 | /* When an argument results in the most permissive result, |
826 | the remaining arguments cannot constrain it. Short-circuit |
827 | the evaluation. */ |
828 | break; |
829 | } |
830 | |
831 | if (phi_ref.sizrng[0] < 0) |
832 | { |
833 | /* Fail if none of the PHI's arguments resulted in updating PHI_REF |
834 | (perhaps because they have all been already visited by prior |
835 | recursive calls). */ |
836 | psnlim->leave_phi (ref); |
837 | return NULL_TREE; |
838 | } |
839 | |
840 | /* Avoid changing *THIS. */ |
841 | if (pref && pref != this) |
842 | { |
843 | /* Keep the SSA_NAME of the PHI unchanged so that all PHI arguments |
844 | can be referred to later if necessary. This is useful even if |
845 | they all refer to the same object. */ |
846 | tree ref = pref->ref; |
847 | *pref = phi_ref; |
848 | pref->ref = ref; |
849 | } |
850 | |
851 | psnlim->leave_phi (ref); |
852 | |
853 | return phi_ref.ref; |
854 | } |
855 | |
856 | /* Return the maximum amount of space remaining and, if PMIN is nonnull, |
857 | set *PMIN to the minimum. */ |
858 | |
859 | offset_int |
860 | access_ref::size_remaining (offset_int *pmin /* = NULL */) const |
861 | { |
862 | offset_int minbuf; |
863 | if (!pmin) |
864 | pmin = &minbuf; |
865 | |
866 | if (sizrng[0] < 0) |
867 | { |
868 | /* If the identity of the object hasn't been determined return |
869 | the maximum size range. */ |
870 | *pmin = 0; |
871 | return wi::to_offset (t: max_object_size ()); |
872 | } |
873 | |
874 | /* add_offset() ensures the offset range isn't inverted. */ |
875 | gcc_checking_assert (offrng[0] <= offrng[1]); |
876 | |
877 | if (base0) |
878 | { |
879 | /* The offset into referenced object is zero-based (i.e., it's |
880 | not referenced by a pointer into middle of some unknown object). */ |
881 | if (offrng[0] < 0 && offrng[1] < 0) |
882 | { |
883 | /* If the offset is negative the remaining size is zero. */ |
884 | *pmin = 0; |
885 | return 0; |
886 | } |
887 | |
888 | if (sizrng[1] <= offrng[0]) |
889 | { |
890 | /* If the starting offset is greater than or equal to the upper |
891 | bound on the size of the object, the space remaining is zero. |
892 | As a special case, if it's equal, set *PMIN to -1 to let |
893 | the caller know the offset is valid and just past the end. */ |
894 | *pmin = sizrng[1] == offrng[0] ? -1 : 0; |
895 | return 0; |
896 | } |
897 | |
898 | /* Otherwise return the size minus the lower bound of the offset. */ |
899 | offset_int or0 = offrng[0] < 0 ? 0 : offrng[0]; |
900 | |
901 | *pmin = sizrng[0] - or0; |
902 | return sizrng[1] - or0; |
903 | } |
904 | |
905 | /* The offset to the referenced object isn't zero-based (i.e., it may |
906 | refer to a byte other than the first). The size of such an object |
907 | is constrained only by the size of the address space (the result |
908 | of max_object_size()). */ |
909 | if (sizrng[1] <= offrng[0]) |
910 | { |
911 | *pmin = 0; |
912 | return 0; |
913 | } |
914 | |
915 | offset_int or0 = offrng[0] < 0 ? 0 : offrng[0]; |
916 | |
917 | *pmin = sizrng[0] - or0; |
918 | return sizrng[1] - or0; |
919 | } |
920 | |
921 | /* Return true if the offset and object size are in range for SIZE. */ |
922 | |
923 | bool |
924 | access_ref::offset_in_range (const offset_int &size) const |
925 | { |
926 | if (size_remaining () < size) |
927 | return false; |
928 | |
929 | if (base0) |
930 | return offmax[0] >= 0 && offmax[1] <= sizrng[1]; |
931 | |
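/* For a reference to an object of unknown identity only require the
   extreme offsets computed so far to be representable in ptrdiff_t.  */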
932 | offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); |
933 | return offmax[0] > -maxoff && offmax[1] < maxoff; |
934 | } |
935 | |
936 | /* Add the range [MIN, MAX] to the offset range. For known objects (with |
937 | zero-based offsets) at least one of whose offset's bounds is in range, |
938 | constrain the other (or both) to the bounds of the object (i.e., zero |
939 | and the upper bound of its size). This improves the quality of |
940 | diagnostics. */ |
941 | |
942 | void access_ref::add_offset (const offset_int &min, const offset_int &max) |
943 | { |
944 | if (min <= max) |
945 | { |
946 | /* To add an ordinary range just add it to the bounds. */ |
947 | offrng[0] += min; |
948 | offrng[1] += max; |
949 | } |
950 | else if (!base0) |
951 | { |
952 | /* To add an inverted range to an offset to an unknown object |
953 | expand it to the maximum. */ |
954 | add_max_offset (); |
955 | return; |
956 | } |
957 | else |
958 | { |
959 | /* To add an inverted range to an offset to a known object set |
960 | the upper bound to the maximum representable offset value |
961 | (which may be greater than MAX_OBJECT_SIZE). |
962 | The lower bound is either the sum of the current offset and |
963 | MIN when abs(MAX) is greater than the former, or zero otherwise. |
964 | Zero because then the inverted range includes the negative of |
965 | the lower bound. */ |
966 | offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); |
967 | offrng[1] = maxoff; |
968 | |
969 | if (max >= 0) |
970 | { |
971 | offrng[0] = 0; |
972 | if (offmax[0] > 0) |
973 | offmax[0] = 0; |
974 | return; |
975 | } |
976 | |
977 | offset_int absmax = wi::abs (x: max); |
978 | if (offrng[0] < absmax) |
979 | { |
980 | offrng[0] += min; |
981 | /* Cap the lower bound at the upper (set to MAXOFF above) |
982 | to avoid inadvertently recreating an inverted range. */ |
983 | if (offrng[1] < offrng[0]) |
984 | offrng[0] = offrng[1]; |
985 | } |
986 | else |
987 | offrng[0] = 0; |
988 | } |
989 | |
990 | /* Set the minimum and maximum computed so far. */ |
991 | if (offrng[1] < 0 && offrng[1] < offmax[0]) |
992 | offmax[0] = offrng[1]; |
993 | if (offrng[0] > 0 && offrng[0] > offmax[1]) |
994 | offmax[1] = offrng[0]; |
995 | |
996 | if (!base0) |
997 | return; |
998 | |
999 | /* When referencing a known object check to see if the offset computed |
1000 | so far is in bounds... */ |
1001 | offset_int remrng[2]; |
1002 | remrng[1] = size_remaining (pmin: remrng); |
1003 | if (remrng[1] > 0 || remrng[0] < 0) |
1004 | { |
1005 | /* ...if so, constrain it so that neither bound exceeds the size of |
1006 | the object. Out of bounds offsets are left unchanged, and, for |
1007 | better or worse, become in bounds later. They should be detected |
1008 | and diagnosed at the point they first become invalid by |
1009 | -Warray-bounds. */ |
1010 | if (offrng[0] < 0) |
1011 | offrng[0] = 0; |
1012 | if (offrng[1] > sizrng[1]) |
1013 | offrng[1] = sizrng[1]; |
1014 | } |
1015 | } |
1016 | |
1017 | /* Issue one inform message describing each target of an access REF. |
1018 | MODE determines whether the access is a read, a write, or both. */ |
1019 | |
1020 | void |
1021 | access_ref::inform_access (access_mode mode, int ostype /* = 1 */) const |
1022 | { |
1023 | const access_ref &aref = *this; |
1024 | if (!aref.ref) |
1025 | return; |
1026 | |
1027 | if (phi ()) |
1028 | { |
1029 | /* Set MAXREF to refer to the largest object and fill ALL_REFS |
1030 | with data for all objects referenced by the PHI arguments. */ |
1031 | access_ref maxref; |
1032 | auto_vec<access_ref> all_refs; |
1033 | if (!get_ref (all_refs: &all_refs, pref: &maxref, ostype)) |
1034 | return; |
1035 | |
1036 | if (all_refs.length ()) |
1037 | { |
1038 | /* Except for MAXREF, the rest of the arguments' offsets need not |
1039 | reflect one added to the PHI itself. Determine the latter from |
1040 | MAXREF on which the result is based. */ |
1041 | const offset_int orng[] = |
1042 | { |
1043 | offrng[0] - maxref.offrng[0], |
1044 | wi::smax (x: offrng[1] - maxref.offrng[1], y: offrng[0]), |
1045 | }; |
1046 | |
1047 | /* Add the final PHI's offset to that of each of the arguments |
1048 | and recurse to issue an inform message for it. */ |
1049 | for (unsigned i = 0; i != all_refs.length (); ++i) |
1050 | { |
1051 | /* Skip any PHIs; those could lead to infinite recursion. */ |
1052 | if (all_refs[i].phi ()) |
1053 | continue; |
1054 | |
1055 | all_refs[i].add_offset (min: orng[0], max: orng[1]); |
1056 | all_refs[i].inform_access (mode, ostype); |
1057 | } |
1058 | return; |
1059 | } |
1060 | } |
1061 | |
1062 | /* Convert offset range and avoid including a zero range since it |
1063 | isn't necessarily meaningful. */ |
1064 | HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node)); |
1065 | HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node)); |
1066 | HOST_WIDE_INT minoff; |
1067 | HOST_WIDE_INT maxoff = diff_max; |
1068 | if (wi::fits_shwi_p (x: aref.offrng[0])) |
1069 | minoff = aref.offrng[0].to_shwi (); |
1070 | else |
1071 | minoff = aref.offrng[0] < 0 ? diff_min : diff_max; |
1072 | |
1073 | if (wi::fits_shwi_p (x: aref.offrng[1])) |
1074 | maxoff = aref.offrng[1].to_shwi (); |
1075 | |
1076 | if (maxoff <= diff_min || maxoff >= diff_max) |
1077 | /* Avoid mentioning an upper bound that's equal to or in excess |
1078 | of the maximum of ptrdiff_t. */ |
1079 | maxoff = minoff; |
1080 | |
1081 | /* Convert size range and always include it since all sizes are |
1082 | meaningful. */ |
1083 | unsigned long long minsize = 0, maxsize = 0; |
1084 | if (wi::fits_shwi_p (x: aref.sizrng[0]) |
1085 | && wi::fits_shwi_p (x: aref.sizrng[1])) |
1086 | { |
1087 | minsize = aref.sizrng[0].to_shwi (); |
1088 | maxsize = aref.sizrng[1].to_shwi (); |
1089 | } |
1090 | |
1091 | /* SIZRNG doesn't necessarily have the same range as the allocation |
1092 | size determined by gimple_call_alloc_size (). */ |
1093 | char sizestr[80]; |
1094 | if (minsize == maxsize) |
1095 | sprintf (s: sizestr, format: "%llu" , minsize); |
1096 | else |
1097 | sprintf (s: sizestr, format: "[%llu, %llu]" , minsize, maxsize); |
1098 | |
1099 | char offstr[80]; |
1100 | if (minoff == 0 |
1101 | && (maxoff == 0 || aref.sizrng[1] <= maxoff)) |
1102 | offstr[0] = '\0'; |
1103 | else if (minoff == maxoff) |
1104 | sprintf (s: offstr, format: "%lli" , (long long) minoff); |
1105 | else |
1106 | sprintf (s: offstr, format: "[%lli, %lli]" , (long long) minoff, (long long) maxoff); |
1107 | |
1108 | location_t loc = UNKNOWN_LOCATION; |
1109 | |
1110 | tree ref = this->ref; |
1111 | tree allocfn = NULL_TREE; |
1112 | if (TREE_CODE (ref) == SSA_NAME) |
1113 | { |
1114 | gimple *stmt = SSA_NAME_DEF_STMT (ref); |
1115 | if (!stmt) |
1116 | return; |
1117 | |
1118 | if (is_gimple_call (gs: stmt)) |
1119 | { |
1120 | loc = gimple_location (g: stmt); |
1121 | if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)) |
1122 | { |
1123 | /* Strip the SSA_NAME suffix from the variable name and |
1124 | recreate an identifier with the VLA's original name. */ |
1125 | ref = gimple_call_lhs (gs: stmt); |
1126 | if (SSA_NAME_IDENTIFIER (ref)) |
1127 | { |
1128 | ref = SSA_NAME_IDENTIFIER (ref); |
1129 | const char *id = IDENTIFIER_POINTER (ref); |
1130 | size_t len = strcspn (s: id, reject: ".$" ); |
1131 | if (!len) |
1132 | len = strlen (s: id); |
1133 | ref = get_identifier_with_length (id, len); |
1134 | } |
1135 | } |
1136 | else |
1137 | { |
1138 | /* Except for VLAs, retrieve the allocation function. */ |
1139 | allocfn = gimple_call_fndecl (gs: stmt); |
1140 | if (!allocfn) |
1141 | allocfn = gimple_call_fn (gs: stmt); |
1142 | if (TREE_CODE (allocfn) == SSA_NAME) |
1143 | { |
1144 | /* For an ALLOC_CALL via a function pointer make a small |
1145 | effort to determine the destination of the pointer. */ |
1146 | gimple *def = SSA_NAME_DEF_STMT (allocfn); |
1147 | if (gimple_assign_single_p (gs: def)) |
1148 | { |
1149 | tree rhs = gimple_assign_rhs1 (gs: def); |
1150 | if (DECL_P (rhs)) |
1151 | allocfn = rhs; |
1152 | else if (TREE_CODE (rhs) == COMPONENT_REF) |
1153 | allocfn = TREE_OPERAND (rhs, 1); |
1154 | } |
1155 | } |
1156 | } |
1157 | } |
1158 | else if (gimple_nop_p (g: stmt)) |
1159 | /* Handle a PARM_DECL below. */ |
1160 | ref = SSA_NAME_VAR (ref); |
1161 | else if (is_gimple_assign (gs: stmt) |
1162 | && (gimple_assign_rhs_code (gs: stmt) == MIN_EXPR |
1163 | || gimple_assign_rhs_code (gs: stmt) == MAX_EXPR)) |
1164 | { |
1165 | /* MIN or MAX_EXPR here implies a reference to a known object |
1166 | and either an unknown or distinct one (the latter being |
1167 | the result of an invalid relational expression). Determine |
1168 | the identity of the former and point to it in the note. |
1169 | TODO: Consider merging with PHI handling. */ |
1170 | access_ref arg_ref[2]; |
1171 | tree arg = gimple_assign_rhs1 (gs: stmt); |
1172 | compute_objsize (ptr: arg, /* ostype = */ 1 , pref: &arg_ref[0]); |
1173 | arg = gimple_assign_rhs2 (gs: stmt); |
1174 | compute_objsize (ptr: arg, /* ostype = */ 1 , pref: &arg_ref[1]); |
1175 | |
1176 | /* Use the argument that references a known object with more |
1177 | space remaining. */ |
1178 | const bool idx |
1179 | = (!arg_ref[0].ref || !arg_ref[0].base0 |
1180 | || (arg_ref[0].base0 && arg_ref[1].base0 |
1181 | && (arg_ref[0].size_remaining () |
1182 | < arg_ref[1].size_remaining ()))); |
1183 | |
1184 | arg_ref[idx].offrng[0] = offrng[0]; |
1185 | arg_ref[idx].offrng[1] = offrng[1]; |
1186 | arg_ref[idx].inform_access (mode); |
1187 | return; |
1188 | } |
1189 | } |
1190 | |
1191 | if (DECL_P (ref)) |
1192 | loc = DECL_SOURCE_LOCATION (ref); |
1193 | else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref)) |
1194 | loc = EXPR_LOCATION (ref); |
1195 | else if (TREE_CODE (ref) != IDENTIFIER_NODE |
1196 | && TREE_CODE (ref) != SSA_NAME) |
1197 | { |
1198 | if (TREE_CODE (ref) == INTEGER_CST && ref_nullptr_p) |
1199 | { |
1200 | if (mode == access_read_write || mode == access_write_only) |
1201 | inform (loc, "destination object is likely at address zero" ); |
1202 | else |
1203 | inform (loc, "source object is likely at address zero" ); |
1204 | } |
1205 | return; |
1206 | } |
1207 | |
1208 | if (mode == access_read_write || mode == access_write_only) |
1209 | { |
1210 | if (allocfn == NULL_TREE) |
1211 | { |
1212 | if (*offstr) |
1213 | inform (loc, "at offset %s into destination object %qE of size %s" , |
1214 | offstr, ref, sizestr); |
1215 | else |
1216 | inform (loc, "destination object %qE of size %s" , ref, sizestr); |
1217 | return; |
1218 | } |
1219 | |
1220 | if (*offstr) |
1221 | inform (loc, |
1222 | "at offset %s into destination object of size %s " |
1223 | "allocated by %qE" , offstr, sizestr, allocfn); |
1224 | else |
1225 | inform (loc, "destination object of size %s allocated by %qE" , |
1226 | sizestr, allocfn); |
1227 | return; |
1228 | } |
1229 | |
1230 | if (mode == access_read_only) |
1231 | { |
1232 | if (allocfn == NULL_TREE) |
1233 | { |
1234 | if (*offstr) |
1235 | inform (loc, "at offset %s into source object %qE of size %s" , |
1236 | offstr, ref, sizestr); |
1237 | else |
1238 | inform (loc, "source object %qE of size %s" , ref, sizestr); |
1239 | |
1240 | return; |
1241 | } |
1242 | |
1243 | if (*offstr) |
1244 | inform (loc, |
1245 | "at offset %s into source object of size %s allocated by %qE" , |
1246 | offstr, sizestr, allocfn); |
1247 | else |
1248 | inform (loc, "source object of size %s allocated by %qE" , |
1249 | sizestr, allocfn); |
1250 | return; |
1251 | } |
1252 | |
1253 | if (allocfn == NULL_TREE) |
1254 | { |
1255 | if (*offstr) |
1256 | inform (loc, "at offset %s into object %qE of size %s" , |
1257 | offstr, ref, sizestr); |
1258 | else |
1259 | inform (loc, "object %qE of size %s" , ref, sizestr); |
1260 | |
1261 | return; |
1262 | } |
1263 | |
1264 | if (*offstr) |
1265 | inform (loc, |
1266 | "at offset %s into object of size %s allocated by %qE" , |
1267 | offstr, sizestr, allocfn); |
1268 | else |
1269 | inform (loc, "object of size %s allocated by %qE" , |
1270 | sizestr, allocfn); |
1271 | } |
1272 | |
1273 | /* Dump *THIS to FILE. */ |
1274 | |
1275 | void |
1276 | access_ref::dump (FILE *file) const |
1277 | { |
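/* Print one '&' for each level by which the address of the reference
   is taken (negative DEREF) and one '*' for each level of indirection
   (positive DEREF).  */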
1278 | for (int i = deref; i < 0; ++i) |
1279 | fputc (c: '&', stream: file); |
1280 | |
1281 | for (int i = 0; i < deref; ++i) |
1282 | fputc (c: '*', stream: file); |
1283 | |
1284 | if (gphi *phi_stmt = phi ()) |
1285 | { |
1286 | fputs (s: "PHI <" , stream: file); |
1287 | unsigned nargs = gimple_phi_num_args (gs: phi_stmt); |
1288 | for (unsigned i = 0; i != nargs; ++i) |
1289 | { |
1290 | tree arg = gimple_phi_arg_def (gs: phi_stmt, index: i); |
1291 | print_generic_expr (file, arg); |
1292 | if (i + 1 < nargs) |
1293 | fputs (s: ", " , stream: file); |
1294 | } |
1295 | fputc (c: '>', stream: file); |
1296 | } |
1297 | else |
1298 | print_generic_expr (file, ref); |
1299 | |
1300 | if (offrng[0] != offrng[1]) |
1301 | fprintf (stream: file, format: " + [%lli, %lli]" , |
1302 | (long long) offrng[0].to_shwi (), |
1303 | (long long) offrng[1].to_shwi ()); |
1304 | else if (offrng[0] != 0) |
1305 | fprintf (stream: file, format: " %c %lli" , |
1306 | offrng[0] < 0 ? '-' : '+', |
1307 | (long long) offrng[0].to_shwi ()); |
1308 | |
1309 | if (base0) |
1310 | fputs (s: " (base0)" , stream: file); |
1311 | |
1312 | fputs (s: "; size: " , stream: file); |
1313 | if (sizrng[0] != sizrng[1]) |
1314 | { |
1315 | offset_int maxsize = wi::to_offset (t: max_object_size ()); |
1316 | if (sizrng[0] == 0 && sizrng[1] >= maxsize) |
1317 | fputs (s: "unknown" , stream: file); |
1318 | else |
1319 | fprintf (stream: file, format: "[%llu, %llu]" , |
1320 | (unsigned long long) sizrng[0].to_uhwi (), |
1321 | (unsigned long long) sizrng[1].to_uhwi ()); |
1322 | } |
1323 | else if (sizrng[0] != 0) |
1324 | fprintf (stream: file, format: "%llu" , |
1325 | (unsigned long long) sizrng[0].to_uhwi ()); |
1326 | |
1327 | fputc (c: '\n', stream: file); |
1328 | } |
1329 | |
1330 | /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1 |
1331 | when MINWRITE or MINREAD, respectively, is set. */ |
1332 | access_data::access_data (range_query *query, gimple *stmt, access_mode mode, |
1333 | tree maxwrite /* = NULL_TREE */, |
1334 | bool minwrite /* = false */, |
1335 | tree maxread /* = NULL_TREE */, |
1336 | bool minread /* = false */) |
1337 | : stmt (stmt), call (), dst (), src (), mode (mode), ostype () |
1338 | { |
1339 | set_bound (dst_bndrng, maxwrite, minwrite, query, stmt); |
1340 | set_bound (src_bndrng, maxread, minread, query, stmt); |
1341 | } |
1342 | |
1343 | /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1 |
1344 | when MINWRITE or MINREAD, respectively, is set. */ |
1345 | access_data::access_data (range_query *query, tree expr, access_mode mode, |
1346 | tree maxwrite /* = NULL_TREE */, |
1347 | bool minwrite /* = false */, |
1348 | tree maxread /* = NULL_TREE */, |
1349 | bool minread /* = false */) |
1350 | : stmt (), call (expr), dst (), src (), mode (mode), ostype () |
1351 | { |
1352 | set_bound (dst_bndrng, maxwrite, minwrite, query, stmt); |
1353 | set_bound (src_bndrng, maxread, minread, query, stmt); |
1354 | } |
1355 | |
1356 | /* Set BNDRNG to the range of BOUND for the statement STMT. */ |
1357 | |
1358 | void |
1359 | access_data::set_bound (offset_int bndrng[2], tree bound, bool minaccess, |
1360 | range_query *query, gimple *stmt) |
1361 | { |
1362 | /* Set the default bounds of the access and adjust below. */ |
1363 | bndrng[0] = minaccess ? 1 : 0; |
1364 | bndrng[1] = HOST_WIDE_INT_M1U; |
1365 | |
1366 | /* When BOUND is nonnull and a range can be extracted from it, |
1367 | set the bounds of the access to reflect both it and MINACCESS. |
1368 | BNDRNG[0] is the size of the minimum access. */ |
1369 | tree rng[2]; |
1370 | if (bound && get_size_range (query, exp: bound, stmt, range: rng, flags: SR_ALLOW_ZERO)) |
1371 | { |
1372 | bndrng[0] = wi::to_offset (t: rng[0]); |
1373 | bndrng[1] = wi::to_offset (t: rng[1]); |
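/* Reduce the lower bound to the minimum access: one byte when MINACCESS
   is set and the bound cannot be zero, and zero otherwise.  */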
1374 | bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0; |
1375 | } |
1376 | } |
1377 | |
1378 | /* Set a bit for the PHI in VISITED and return true if it wasn't |
1379 | already set. */ |
1380 | |
1381 | bool |
1382 | ssa_name_limit_t::visit_phi (tree ssa_name) |
1383 | { |
1384 | if (!visited) |
1385 | visited = BITMAP_ALLOC (NULL); |
1386 | |
1387 | /* Return false if SSA_NAME has already been visited. */ |
1388 | return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name)); |
1389 | } |
1390 | |
1391 | /* Clear a bit for the PHI in VISITED. */ |
1392 | |
1393 | void |
1394 | ssa_name_limit_t::leave_phi (tree ssa_name) |
1395 | { |
1396 | /* Clear the bit so SSA_NAME can be visited again along another path. */ |
1397 | bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name)); |
1398 | } |
1399 | |
1400 | /* Return false if the SSA_NAME chain length counter has reached |
1401 | the limit, otherwise increment the counter and return true. */ |
1402 | |
1403 | bool |
1404 | ssa_name_limit_t::next () |
1405 | { |
1406 | /* Return false to let the caller avoid recursing beyond |
1407 | the specified limit. */ |
1408 | if (ssa_def_max == 0) |
1409 | return false; |
1410 | |
1411 | --ssa_def_max; |
1412 | return true; |
1413 | } |
1414 | |
1415 | /* If the SSA_NAME has already been "seen" return a positive value. |
1416 | Otherwise add it to VISITED. If the SSA_NAME limit has been |
1417 | reached, return a negative value. Otherwise return zero. */ |
1418 | |
1419 | int |
1420 | ssa_name_limit_t::next_phi (tree ssa_name) |
1421 | { |
1422 | { |
1423 | gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name); |
1424 | /* Return a positive value if the PHI has already been visited. */ |
1425 | if (gimple_code (g: def_stmt) == GIMPLE_PHI |
1426 | && !visit_phi (ssa_name)) |
1427 | return 1; |
1428 | } |
1429 | |
1430 | /* Return a negative value to let caller avoid recursing beyond |
1431 | the specified limit. */ |
1432 | if (ssa_def_max == 0) |
1433 | return -1; |
1434 | |
1435 | --ssa_def_max; |
1436 | |
1437 | return 0; |
1438 | } |
1439 | |
1440 | ssa_name_limit_t::~ssa_name_limit_t () |
1441 | { |
1442 | if (visited) |
1443 | BITMAP_FREE (visited); |
1444 | } |
1445 | |
1446 | /* Default ctor. Initialize the object with a pointer to the range_query |
1447 | instance to use, or null. */ |
1448 | |
1449 | pointer_query::pointer_query (range_query *qry /* = NULL */) |
1450 | : rvals (qry), hits (), misses (), failures (), depth (), max_depth (), |
1451 | var_cache () |
1452 | { |
1453 | /* No op. */ |
1454 | } |
1455 | |
1456 | /* Return a pointer to the cached access_ref instance for the SSA_NAME |
1457 | PTR if it's there or null otherwise. */ |
1458 | |
1459 | const access_ref * |
1460 | pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const |
1461 | { |
1462 | unsigned version = SSA_NAME_VERSION (ptr); |
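/* The level-1 cache index combines the SSA version with the object
   size type: the version shifted left by one with the low bit of OSTYPE
   in bit zero (see also put_ref and dump).  */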
1463 | unsigned idx = version << 1 | (ostype & 1); |
1464 | if (var_cache.indices.length () <= idx) |
1465 | { |
1466 | ++misses; |
1467 | return NULL; |
1468 | } |
1469 | |
1470 | unsigned cache_idx = var_cache.indices[idx]; |
1471 | if (var_cache.access_refs.length () <= cache_idx) |
1472 | { |
1473 | ++misses; |
1474 | return NULL; |
1475 | } |
1476 | |
1477 | const access_ref &cache_ref = var_cache.access_refs[cache_idx]; |
1478 | if (cache_ref.ref) |
1479 | { |
1480 | ++hits; |
1481 | return &cache_ref; |
1482 | } |
1483 | |
1484 | ++misses; |
1485 | return NULL; |
1486 | } |
1487 | |
1488 | /* Retrieve the access_ref instance for a variable from the cache if it's |
1489 | there or compute it and insert it into the cache if it's nonnull. */ |
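/* A typical use, assuming QRY is a pointer_query, PTR an SSA_NAME pointer,
   and STMT the statement that uses it:
     access_ref aref;
     if (qry.get_ref (ptr, stmt, &aref, 1))
       ... examine aref.sizrng[] and aref.offrng[] ...  */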
1490 | |
1491 | bool |
1492 | pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref, |
1493 | int ostype /* = 1 */) |
1494 | { |
1495 | const unsigned version |
1496 | = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0; |
1497 | |
1498 | if (version) |
1499 | { |
1500 | unsigned idx = version << 1 | (ostype & 1); |
1501 | if (idx < var_cache.indices.length ()) |
1502 | { |
1503 | unsigned cache_idx = var_cache.indices[idx] - 1; |
1504 | if (cache_idx < var_cache.access_refs.length () |
1505 | && var_cache.access_refs[cache_idx].ref) |
1506 | { |
1507 | ++hits; |
1508 | *pref = var_cache.access_refs[cache_idx]; |
1509 | return true; |
1510 | } |
1511 | } |
1512 | |
1513 | ++misses; |
1514 | } |
1515 | |
1516 | if (!compute_objsize (ptr, stmt, ostype, pref, this)) |
1517 | { |
1518 | ++failures; |
1519 | return false; |
1520 | } |
1521 | |
1522 | return true; |
1523 | } |
1524 | |
1525 | /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's |
1526 | nonnull. */ |
1527 | |
1528 | void |
1529 | pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */) |
1530 | { |
1531 | /* Only add populated/valid entries. */ |
1532 | if (!ref.ref || ref.sizrng[0] < 0) |
1533 | return; |
1534 | |
1535 | /* Add REF to the two-level cache. */ |
1536 | unsigned version = SSA_NAME_VERSION (ptr); |
1537 | unsigned idx = version << 1 | (ostype & 1); |
1538 | |
1539 | /* Grow INDICES if necessary. An index is valid if it's nonzero. |
1540 | Its value minus one is the index into ACCESS_REFS. Not all |
1541 | entries are valid. */ |
1542 | if (var_cache.indices.length () <= idx) |
1543 | var_cache.indices.safe_grow_cleared (len: idx + 1); |
1544 | |
1545 | if (!var_cache.indices[idx]) |
1546 | var_cache.indices[idx] = var_cache.access_refs.length () + 1; |
1547 | |
1548 | /* Grow ACCESS_REF cache if necessary. An entry is valid if its |
1549 | REF member is nonnull. All entries except for the last two |
1550 | are valid. Once nonnull, the REF value must stay unchanged. */ |
1551 | unsigned cache_idx = var_cache.indices[idx] - 1; |
1552 | if (var_cache.access_refs.length () <= cache_idx) |
1553 | var_cache.access_refs.safe_grow_cleared (len: cache_idx + 1); |
1554 | |
1555 | access_ref &cache_ref = var_cache.access_refs[cache_idx]; |
1556 | if (cache_ref.ref) |
1557 | { |
1558 | gcc_checking_assert (cache_ref.ref == ref.ref); |
1559 | return; |
1560 | } |
1561 | |
1562 | cache_ref = ref; |
1563 | } |
1564 | |
1565 | /* Flush the cache if it's nonnull. */ |
1566 | |
1567 | void |
1568 | pointer_query::flush_cache () |
1569 | { |
1570 | var_cache.indices.release (); |
1571 | var_cache.access_refs.release (); |
1572 | } |
1573 | |
1574 | /* Dump statistics and, optionally, cache contents to DUMP_FILE. */ |
1575 | |
1576 | void |
1577 | pointer_query::dump (FILE *dump_file, bool contents /* = false */) |
1578 | { |
1579 | unsigned nused = 0, nrefs = 0; |
1580 | unsigned nidxs = var_cache.indices.length (); |
1581 | for (unsigned i = 0; i != nidxs; ++i) |
1582 | { |
1583 | unsigned ari = var_cache.indices[i]; |
1584 | if (!ari) |
1585 | continue; |
1586 | |
1587 | ++nused; |
1588 | |
1589 | const access_ref &aref = var_cache.access_refs[ari]; |
1590 | if (!aref.ref) |
1591 | continue; |
1592 | |
1593 | ++nrefs; |
1594 | } |
1595 | |
1596 | fprintf (stream: dump_file, format: "pointer_query counters:\n" |
1597 | " index cache size: %u\n" |
1598 | " index entries: %u\n" |
1599 | " access cache size: %u\n" |
1600 | " access entries: %u\n" |
1601 | " hits: %u\n" |
1602 | " misses: %u\n" |
1603 | " failures: %u\n" |
1604 | " max_depth: %u\n" , |
1605 | nidxs, nused, |
1606 | var_cache.access_refs.length (), nrefs, |
1607 | hits, misses, failures, max_depth); |
1608 | |
1609 | if (!contents || !nidxs) |
1610 | return; |
1611 | |
1612 | fputs (s: "\npointer_query cache contents:\n" , stream: dump_file); |
1613 | |
1614 | for (unsigned i = 0; i != nidxs; ++i) |
1615 | { |
1616 | unsigned ari = var_cache.indices[i]; |
1617 | if (!ari) |
1618 | continue; |
1619 | |
1620 | const access_ref &aref = var_cache.access_refs[ari]; |
1621 | if (!aref.ref) |
1622 | continue; |
1623 | |
1624 | /* The level-1 cache index corresponds to the SSA_NAME_VERSION |
1625 | shifted left by one and ORed with the Object Size Type in |
1626 | the lowest bit. Print the two separately. */ |
1627 | unsigned ver = i >> 1; |
1628 | unsigned ost = i & 1; |
1629 | |
1630 | fprintf (stream: dump_file, format: " %u.%u[%u]: " , ver, ost, ari); |
1631 | if (tree name = ssa_name (ver)) |
1632 | { |
1633 | print_generic_expr (dump_file, name); |
1634 | fputs (s: " = " , stream: dump_file); |
1635 | } |
1636 | else |
1637 | fprintf (stream: dump_file, format: " _%u = " , ver); |
1638 | |
1639 | aref.dump (file: dump_file); |
1640 | } |
1641 | |
1642 | fputc (c: '\n', stream: dump_file); |
1643 | } |
1644 | |
1645 | /* A helper of compute_objsize_r() to determine the size from an assignment |
1646 | statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. On success |
1647 | set PREF->REF to the operand with more or less space remaining, |
1648 | respectively, if both refer to the same (sub)object, or to PTR if they |
1649 | might not, and return true. Otherwise, if the identity of neither |
1650 | operand can be determined, return false. */ |
1651 | |
1652 | static bool |
1653 | handle_min_max_size (tree ptr, int ostype, access_ref *pref, |
1654 | ssa_name_limit_t &snlim, pointer_query *qry) |
1655 | { |
1656 | gimple *stmt = SSA_NAME_DEF_STMT (ptr); |
1657 | const tree_code code = gimple_assign_rhs_code (gs: stmt); |
1658 | |
1659 | /* In a valid MAX_/MIN_EXPR both operands must refer to the same array. |
1660 | Determine the size/offset of each and use the one with more or less |
1661 | space remaining, respectively. If either fails, use the information |
1662 | determined from the other instead, adjusted up or down as appropriate |
1663 | for the expression. */ |
1664 | access_ref aref[2] = { *pref, *pref }; |
1665 | tree arg1 = gimple_assign_rhs1 (gs: stmt); |
1666 | if (!compute_objsize_r (arg1, stmt, false, ostype, &aref[0], snlim, qry)) |
1667 | { |
1668 | aref[0].base0 = false; |
1669 | aref[0].offrng[0] = aref[0].offrng[1] = 0; |
1670 | aref[0].add_max_offset (); |
1671 | aref[0].set_max_size_range (); |
1672 | } |
1673 | |
1674 | tree arg2 = gimple_assign_rhs2 (gs: stmt); |
1675 | if (!compute_objsize_r (arg2, stmt, false, ostype, &aref[1], snlim, qry)) |
1676 | { |
1677 | aref[1].base0 = false; |
1678 | aref[1].offrng[0] = aref[1].offrng[1] = 0; |
1679 | aref[1].add_max_offset (); |
1680 | aref[1].set_max_size_range (); |
1681 | } |
1682 | |
1683 | if (!aref[0].ref && !aref[1].ref) |
1684 | /* Fail if the identity of neither argument could be determined. */ |
1685 | return false; |
1686 | |
1687 | bool i0 = false; |
1688 | if (aref[0].ref && aref[0].base0) |
1689 | { |
1690 | if (aref[1].ref && aref[1].base0) |
1691 | { |
1692 | /* If the object referenced by both arguments has been determined |
set *PREF to the one with more or less space remaining, whichever
is appropriate for CODE.
1695 | TODO: Indicate when the objects are distinct so it can be |
1696 | diagnosed. */ |
1697 | i0 = code == MAX_EXPR; |
1698 | const bool i1 = !i0; |
1699 | |
1700 | if (aref[i0].size_remaining () < aref[i1].size_remaining ()) |
1701 | *pref = aref[i1]; |
1702 | else |
1703 | *pref = aref[i0]; |
1704 | |
1705 | if (aref[i0].ref != aref[i1].ref) |
1706 | /* If the operands don't refer to the same (sub)object set |
1707 | PREF->REF to the SSA_NAME from which STMT was obtained |
1708 | so that both can be identified in a diagnostic. */ |
1709 | pref->ref = ptr; |
1710 | |
1711 | return true; |
1712 | } |
1713 | |
1714 | /* If only the object referenced by one of the arguments could be |
1715 | determined, use it and... */ |
1716 | *pref = aref[0]; |
1717 | i0 = true; |
1718 | } |
1719 | else |
1720 | *pref = aref[1]; |
1721 | |
1722 | const bool i1 = !i0; |
1723 | /* ...see if the offset obtained from the other pointer can be used |
1724 | to tighten up the bound on the offset obtained from the first. */ |
1725 | if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0]) |
1726 | || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1])) |
1727 | { |
1728 | pref->offrng[0] = aref[i0].offrng[0]; |
1729 | pref->offrng[1] = aref[i0].offrng[1]; |
1730 | } |
1731 | |
1732 | /* Replace PTR->REF with the SSA_NAME to indicate the expression |
1733 | might not refer to the same (sub)object. */ |
1734 | pref->ref = ptr; |
1735 | return true; |
1736 | } |
1737 | |
1738 | /* A helper of compute_objsize_r() to determine the size of a DECL. |
1739 | Return true on success and (possibly in the future) false on failure. */ |
1740 | |
1741 | static bool |
1742 | handle_decl (tree decl, bool addr, access_ref *pref) |
1743 | { |
1744 | tree decl_type = TREE_TYPE (decl); |
1745 | |
1746 | pref->ref = decl; |
1747 | |
1748 | /* Reset the offset in case it was set by a prior call and not |
1749 | cleared by the caller. The offset is only adjusted after |
1750 | the identity of the object has been determined. */ |
1751 | pref->offrng[0] = pref->offrng[1] = 0; |
1752 | |
1753 | if (!addr && POINTER_TYPE_P (decl_type)) |
1754 | { |
1755 | /* Set the maximum size if the reference is to the pointer |
1756 | itself (as opposed to what it points to), and clear |
1757 | BASE0 since the offset isn't necessarily zero-based. */ |
1758 | pref->set_max_size_range (); |
1759 | pref->base0 = false; |
1760 | return true; |
1761 | } |
1762 | |
1763 | /* Valid offsets into the object are nonnegative. */ |
1764 | pref->base0 = true; |
1765 | |
1766 | if (tree size = decl_init_size (decl, false)) |
1767 | if (TREE_CODE (size) == INTEGER_CST) |
1768 | { |
1769 | pref->sizrng[0] = wi::to_offset (t: size); |
1770 | pref->sizrng[1] = pref->sizrng[0]; |
1771 | return true; |
1772 | } |
1773 | |
1774 | pref->set_max_size_range (); |
1775 | return true; |
1776 | } |
1777 | |
1778 | /* A helper of compute_objsize_r() to determine the size from ARRAY_REF |
1779 | AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true |
1780 | on success and false on failure. */ |
1781 | |
1782 | static bool |
1783 | handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype, |
1784 | access_ref *pref, ssa_name_limit_t &snlim, |
1785 | pointer_query *qry) |
1786 | { |
1787 | gcc_assert (TREE_CODE (aref) == ARRAY_REF); |
1788 | |
1789 | tree arefop = TREE_OPERAND (aref, 0); |
1790 | tree reftype = TREE_TYPE (arefop); |
1791 | if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE) |
/* Avoid arrays of pointers. FIXME: Handle pointers to arrays
1793 | of known bound. */ |
1794 | return false; |
1795 | |
1796 | if (!compute_objsize_r (arefop, stmt, addr, ostype, pref, snlim, qry)) |
1797 | return false; |
1798 | |
1799 | offset_int orng[2]; |
1800 | tree off = pref->eval (TREE_OPERAND (aref, 1)); |
1801 | range_query *const rvals = qry ? qry->rvals : NULL; |
1802 | if (!get_offset_range (x: off, stmt, r: orng, rvals)) |
1803 | { |
1804 | /* Set ORNG to the maximum offset representable in ptrdiff_t. */ |
1805 | orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); |
1806 | orng[0] = -orng[1] - 1; |
1807 | } |
1808 | |
1809 | /* Convert the array index range determined above to a byte offset. */ |
1810 | tree lowbnd = array_ref_low_bound (aref); |
1811 | if (TREE_CODE (lowbnd) == INTEGER_CST && !integer_zerop (lowbnd)) |
1812 | { |
1813 | /* Adjust the index by the low bound of the array domain (0 in C/C++, |
1814 | 1 in Fortran and anything in Ada) by applying the same processing |
1815 | as in get_offset_range. */ |
1816 | const wide_int wlb = wi::to_wide (t: lowbnd); |
1817 | signop sgn = SIGNED; |
1818 | if (TYPE_UNSIGNED (TREE_TYPE (lowbnd)) |
1819 | && wlb.get_precision () < TYPE_PRECISION (sizetype)) |
1820 | sgn = UNSIGNED; |
1821 | const offset_int lb = offset_int::from (x: wlb, sgn); |
1822 | orng[0] -= lb; |
1823 | orng[1] -= lb; |
1824 | } |
1825 | |
1826 | tree eltype = TREE_TYPE (aref); |
1827 | tree tpsize = TYPE_SIZE_UNIT (eltype); |
1828 | if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST) |
1829 | { |
1830 | pref->add_max_offset (); |
1831 | return true; |
1832 | } |
1833 | |
1834 | offset_int sz = wi::to_offset (t: tpsize); |
1835 | orng[0] *= sz; |
1836 | orng[1] *= sz; |
1837 | |
1838 | if (ostype && TREE_CODE (eltype) == ARRAY_TYPE) |
1839 | { |
1840 | /* Except for the permissive raw memory functions which use |
1841 | the size of the whole object determined above, use the size |
1842 | of the referenced array. Because the overall offset is from |
1843 | the beginning of the complete array object add this overall |
offset to the size of the array. */
1845 | offset_int sizrng[2] = |
1846 | { |
1847 | pref->offrng[0] + orng[0] + sz, |
1848 | pref->offrng[1] + orng[1] + sz |
1849 | }; |
1850 | if (sizrng[1] < sizrng[0]) |
1851 | std::swap (a&: sizrng[0], b&: sizrng[1]); |
1852 | if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0]) |
1853 | pref->sizrng[0] = sizrng[0]; |
1854 | if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1]) |
1855 | pref->sizrng[1] = sizrng[1]; |
1856 | } |
1857 | |
1858 | pref->add_offset (min: orng[0], max: orng[1]); |
1859 | return true; |
1860 | } |
1861 | |
1862 | /* Given a COMPONENT_REF CREF, set *PREF size to the size of the referenced |
1863 | member. */ |
1864 | |
1865 | static void |
1866 | set_component_ref_size (tree cref, access_ref *pref) |
1867 | { |
1868 | const tree base = TREE_OPERAND (cref, 0); |
1869 | const tree base_type = TREE_TYPE (base); |
1870 | |
1871 | /* SAM is set for array members that might need special treatment. */ |
1872 | special_array_member sam; |
1873 | tree size = component_ref_size (cref, &sam); |
1874 | if (sam == special_array_member::int_0) |
1875 | pref->sizrng[0] = pref->sizrng[1] = 0; |
1876 | else if (!pref->trail1special && sam == special_array_member::trail_1) |
1877 | pref->sizrng[0] = pref->sizrng[1] = 1; |
1878 | else if (size && TREE_CODE (size) == INTEGER_CST) |
1879 | pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (t: size); |
1880 | else |
1881 | { |
1882 | /* When the size of the member is unknown it's either a flexible |
1883 | array member or a trailing special array member (either zero |
1884 | length or one-element). Set the size to the maximum minus |
1885 | the constant size of the base object's type. */ |
1886 | pref->sizrng[0] = 0; |
1887 | pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); |
1888 | if (tree base_size = TYPE_SIZE_UNIT (base_type)) |
1889 | if (TREE_CODE (base_size) == INTEGER_CST) |
1890 | pref->sizrng[1] -= wi::to_offset (t: base_size); |
1891 | } |
1892 | } |
1893 | |
1894 | /* A helper of compute_objsize_r() to determine the size from COMPONENT_REF |
1895 | CREF. Return true on success and false on failure. */ |
1896 | |
1897 | static bool |
1898 | handle_component_ref (tree cref, gimple *stmt, bool addr, int ostype, |
1899 | access_ref *pref, ssa_name_limit_t &snlim, |
1900 | pointer_query *qry) |
1901 | { |
1902 | gcc_assert (TREE_CODE (cref) == COMPONENT_REF); |
1903 | |
1904 | const tree base = TREE_OPERAND (cref, 0); |
1905 | const tree field = TREE_OPERAND (cref, 1); |
1906 | access_ref base_ref = *pref; |
1907 | |
1908 | /* Unconditionally determine the size of the base object (it could |
1909 | be smaller than the referenced member when the object is stored |
1910 | in a buffer with an insufficient size). */ |
1911 | if (!compute_objsize_r (base, stmt, addr, 0, &base_ref, snlim, qry)) |
1912 | return false; |
1913 | |
1914 | /* Add the offset of the member to the offset into the object computed |
1915 | so far. */ |
1916 | tree offset = byte_position (field); |
1917 | if (TREE_CODE (offset) == INTEGER_CST) |
1918 | base_ref.add_offset (off: wi::to_offset (t: offset)); |
1919 | else |
1920 | base_ref.add_max_offset (); |
1921 | |
1922 | if (!base_ref.ref) |
1923 | /* PREF->REF may have been already set to an SSA_NAME earlier |
1924 | to provide better context for diagnostics. In that case, |
1925 | leave it unchanged. */ |
1926 | base_ref.ref = base; |
1927 | |
1928 | const tree base_type = TREE_TYPE (base); |
1929 | if (TREE_CODE (base_type) == UNION_TYPE) |
/* In accesses through a union type consider the entire union
rather than just its members. */
1932 | ostype = 0; |
1933 | |
1934 | if (ostype == 0) |
1935 | { |
1936 | /* In OSTYPE zero (for raw memory functions like memcpy), use |
1937 | the maximum size instead if the identity of the enclosing |
1938 | object cannot be determined. */ |
1939 | *pref = base_ref; |
1940 | return true; |
1941 | } |
1942 | |
1943 | pref->ref = field; |
1944 | |
1945 | if (!addr && POINTER_TYPE_P (TREE_TYPE (field))) |
1946 | { |
1947 | /* Set maximum size if the reference is to the pointer member |
1948 | itself (as opposed to what it points to). */ |
1949 | pref->set_max_size_range (); |
1950 | return true; |
1951 | } |
1952 | |
1953 | set_component_ref_size (cref, pref); |
1954 | |
1955 | if (base_ref.size_remaining () < pref->size_remaining ()) |
1956 | /* Use the base object if it's smaller than the member. */ |
1957 | *pref = base_ref; |
1958 | |
1959 | return true; |
1960 | } |
1961 | |
1962 | /* A helper of compute_objsize_r() to determine the size from MEM_REF |
1963 | MREF. Return true on success and false on failure. */ |
1964 | |
1965 | static bool |
1966 | handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref, |
1967 | ssa_name_limit_t &snlim, pointer_query *qry) |
1968 | { |
1969 | gcc_assert (TREE_CODE (mref) == MEM_REF); |
1970 | |
1971 | tree mreftype = TYPE_MAIN_VARIANT (TREE_TYPE (mref)); |
1972 | if (VECTOR_TYPE_P (mreftype)) |
1973 | { |
1974 | /* Hack: Handle MEM_REFs of vector types as those to complete |
1975 | objects; those may be synthesized from multiple assignments |
1976 | to consecutive data members (see PR 93200 and 96963). |
1977 | FIXME: Vectorized assignments should only be present after |
1978 | vectorization so this hack is only necessary after it has |
1979 | run and could be avoided in calls from prior passes (e.g., |
1980 | tree-ssa-strlen.cc). |
1981 | FIXME: Deal with this more generally, e.g., by marking up |
1982 | such MEM_REFs at the time they're created. */ |
1983 | ostype = 0; |
1984 | } |
1985 | |
1986 | tree mrefop = TREE_OPERAND (mref, 0); |
1987 | if (!compute_objsize_r (mrefop, stmt, false, ostype, pref, snlim, qry)) |
1988 | return false; |
1989 | |
1990 | ++pref->deref; |
1991 | |
1992 | offset_int orng[2]; |
1993 | tree off = pref->eval (TREE_OPERAND (mref, 1)); |
1994 | range_query *const rvals = qry ? qry->rvals : NULL; |
1995 | if (!get_offset_range (x: off, stmt, r: orng, rvals)) |
1996 | { |
1997 | /* Set ORNG to the maximum offset representable in ptrdiff_t. */ |
1998 | orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); |
1999 | orng[0] = -orng[1] - 1; |
2000 | } |
2001 | |
2002 | pref->add_offset (min: orng[0], max: orng[1]); |
2003 | return true; |
2004 | } |
2005 | |
2006 | /* A helper of compute_objsize_r() to determine the size from SSA_NAME |
2007 | PTR. Return true on success and false on failure. */ |
2008 | |
2009 | static bool |
2010 | handle_ssa_name (tree ptr, bool addr, int ostype, |
2011 | access_ref *pref, ssa_name_limit_t &snlim, |
2012 | pointer_query *qry) |
2013 | { |
2014 | if (!snlim.next ()) |
2015 | return false; |
2016 | |
2017 | /* Only process an SSA_NAME if the recursion limit has not yet |
2018 | been reached. */ |
2019 | if (qry) |
2020 | { |
2021 | if (++qry->depth > qry->max_depth) |
2022 | qry->max_depth = qry->depth; |
2023 | if (const access_ref *cache_ref = qry->get_ref (ptr, ostype)) |
2024 | { |
/* Add the number of DEREFerences accumulated so far. */
2026 | const int deref = pref->deref; |
2027 | *pref = *cache_ref; |
2028 | pref->deref += deref; |
2029 | return true; |
2030 | } |
2031 | } |
2032 | |
2033 | gimple *stmt = SSA_NAME_DEF_STMT (ptr); |
2034 | if (is_gimple_call (gs: stmt)) |
2035 | { |
2036 | /* If STMT is a call to an allocation function get the size |
2037 | from its argument(s). If successful, also set *PREF->REF |
2038 | to PTR for the caller to include in diagnostics. */ |
2039 | wide_int wr[2]; |
2040 | range_query *const rvals = qry ? qry->rvals : NULL; |
2041 | if (gimple_call_alloc_size (stmt, rng1: wr, qry: rvals)) |
2042 | { |
2043 | pref->ref = ptr; |
2044 | pref->sizrng[0] = offset_int::from (x: wr[0], sgn: UNSIGNED); |
2045 | pref->sizrng[1] = offset_int::from (x: wr[1], sgn: UNSIGNED); |
2046 | /* Constrain both bounds to a valid size. */ |
2047 | offset_int maxsize = wi::to_offset (t: max_object_size ()); |
2048 | if (pref->sizrng[0] > maxsize) |
2049 | pref->sizrng[0] = maxsize; |
2050 | if (pref->sizrng[1] > maxsize) |
2051 | pref->sizrng[1] = maxsize; |
2052 | } |
2053 | else |
2054 | { |
2055 | /* For functions known to return one of their pointer arguments |
2056 | try to determine what the returned pointer points to, and on |
2057 | success add OFFRNG which was set to the offset added by |
2058 | the function (e.g., memchr) to the overall offset. */ |
2059 | bool past_end; |
2060 | offset_int offrng[2]; |
2061 | if (tree ret = gimple_call_return_array (stmt, offrng, past_end: &past_end, |
2062 | snlim, qry)) |
2063 | { |
2064 | if (!compute_objsize_r (ret, stmt, addr, ostype, pref, snlim, qry)) |
2065 | return false; |
2066 | |
2067 | /* Cap OFFRNG[1] to at most the remaining size of |
2068 | the object. */ |
2069 | offset_int remrng[2]; |
2070 | remrng[1] = pref->size_remaining (pmin: remrng); |
2071 | if (remrng[1] != 0 && !past_end) |
2072 | /* Decrement the size for functions that never return |
2073 | a past-the-end pointer. */ |
2074 | remrng[1] -= 1; |
2075 | |
2076 | if (remrng[1] < offrng[1]) |
2077 | offrng[1] = remrng[1]; |
2078 | pref->add_offset (min: offrng[0], max: offrng[1]); |
2079 | } |
2080 | else |
2081 | { |
2082 | /* For other calls that might return arbitrary pointers |
2083 | including into the middle of objects set the size |
2084 | range to maximum, clear PREF->BASE0, and also set |
2085 | PREF->REF to include in diagnostics. */ |
2086 | pref->set_max_size_range (); |
2087 | pref->base0 = false; |
2088 | pref->ref = ptr; |
2089 | } |
2090 | } |
2091 | qry->put_ref (ptr, ref: *pref, ostype); |
2092 | return true; |
2093 | } |
2094 | |
2095 | if (gimple_nop_p (g: stmt)) |
2096 | { |
2097 | /* For a function argument try to determine the byte size |
of the array from the current function declaration
2099 | (e.g., attribute access or related). */ |
2100 | wide_int wr[2]; |
2101 | bool static_array = false; |
2102 | if (tree ref = gimple_parm_array_size (ptr, rng: wr, static_array: &static_array)) |
2103 | { |
2104 | pref->parmarray = !static_array; |
2105 | pref->sizrng[0] = offset_int::from (x: wr[0], sgn: UNSIGNED); |
2106 | pref->sizrng[1] = offset_int::from (x: wr[1], sgn: UNSIGNED); |
2107 | pref->ref = ref; |
2108 | qry->put_ref (ptr, ref: *pref, ostype); |
2109 | return true; |
2110 | } |
2111 | |
2112 | pref->set_max_size_range (); |
2113 | pref->base0 = false; |
2114 | pref->ref = ptr; |
2115 | qry->put_ref (ptr, ref: *pref, ostype); |
2116 | return true; |
2117 | } |
2118 | |
2119 | if (gimple_code (g: stmt) == GIMPLE_PHI) |
2120 | { |
2121 | /* Pass PTR to get_ref() via PREF. If all PHI arguments refer |
to the same object the function will replace it with that object. */
2123 | pref->ref = ptr; |
2124 | access_ref phi_ref = *pref; |
2125 | if (!pref->get_ref (NULL, pref: &phi_ref, ostype, psnlim: &snlim, qry)) |
2126 | return false; |
2127 | *pref = phi_ref; |
2128 | qry->put_ref (ptr, ref: *pref, ostype); |
2129 | return true; |
2130 | } |
2131 | |
2132 | if (!is_gimple_assign (gs: stmt)) |
2133 | { |
2134 | /* Clear BASE0 since the assigned pointer might point into |
2135 | the middle of the object, set the maximum size range and, |
if the SSA_NAME refers to a function argument, set
2137 | PREF->REF to it. */ |
2138 | pref->base0 = false; |
2139 | pref->set_max_size_range (); |
2140 | pref->ref = ptr; |
2141 | return true; |
2142 | } |
2143 | |
2144 | tree_code code = gimple_assign_rhs_code (gs: stmt); |
2145 | |
2146 | if (code == MAX_EXPR || code == MIN_EXPR) |
2147 | { |
2148 | if (!handle_min_max_size (ptr, ostype, pref, snlim, qry)) |
2149 | return false; |
2150 | |
2151 | qry->put_ref (ptr, ref: *pref, ostype); |
2152 | return true; |
2153 | } |
2154 | |
2155 | tree rhs = gimple_assign_rhs1 (gs: stmt); |
2156 | |
2157 | if (code == POINTER_PLUS_EXPR |
2158 | && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE) |
2159 | { |
2160 | /* Compute the size of the object first. */ |
2161 | if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry)) |
2162 | return false; |
2163 | |
2164 | offset_int orng[2]; |
2165 | tree off = gimple_assign_rhs2 (gs: stmt); |
2166 | range_query *const rvals = qry ? qry->rvals : NULL; |
2167 | if (get_offset_range (x: off, stmt, r: orng, rvals)) |
2168 | pref->add_offset (min: orng[0], max: orng[1]); |
2169 | else |
2170 | pref->add_max_offset (); |
2171 | |
2172 | qry->put_ref (ptr, ref: *pref, ostype); |
2173 | return true; |
2174 | } |
2175 | |
2176 | if (code == ADDR_EXPR || code == SSA_NAME) |
2177 | { |
2178 | if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry)) |
2179 | return false; |
2180 | qry->put_ref (ptr, ref: *pref, ostype); |
2181 | return true; |
2182 | } |
2183 | |
2184 | if (ostype > 1 && POINTER_TYPE_P (TREE_TYPE (rhs))) |
2185 | { |
2186 | /* When determining the qualifiers follow the pointer but |
2187 | avoid caching the result. As the pointer is added to |
2188 | and/or dereferenced the computed size and offset need |
2189 | not be meaningful for other queries involving the same |
2190 | pointer. */ |
2191 | if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry)) |
2192 | return false; |
2193 | |
2194 | rhs = pref->ref; |
2195 | } |
2196 | |
2197 | /* (This could also be an assignment from a nonlocal pointer.) Save |
2198 | PTR to mention in diagnostics but otherwise treat it as a pointer |
2199 | to an unknown object. */ |
2200 | pref->ref = rhs; |
2201 | pref->base0 = false; |
2202 | pref->set_max_size_range (); |
2203 | return true; |
2204 | } |
2205 | |
2206 | /* Helper to compute the size of the object referenced by the PTR |
2207 | expression which must have pointer type, using Object Size type |
2208 | OSTYPE (only the least significant 2 bits are used). |
2209 | On success, sets PREF->REF to the DECL of the referenced object |
2210 | if it's unique, otherwise to null, PREF->OFFRNG to the range of |
2211 | offsets into it, and PREF->SIZRNG to the range of sizes of |
2212 | the object(s). |
2213 | ADDR is true for an enclosing ADDR_EXPR. |
2214 | SNLIM is used to avoid visiting the same PHI operand multiple |
2215 | times, and, when nonnull, RVALS to determine range information. |
2216 | Returns true on success, false when a meaningful size (or range) |
2217 | cannot be determined. |
2218 | |
2219 | The function is intended for diagnostics and should not be used |
2220 | to influence code generation or optimization. */ |
2221 | |
2222 | static bool |
2223 | compute_objsize_r (tree ptr, gimple *stmt, bool addr, int ostype, |
2224 | access_ref *pref, ssa_name_limit_t &snlim, |
2225 | pointer_query *qry) |
2226 | { |
2227 | STRIP_NOPS (ptr); |
2228 | |
2229 | if (DECL_P (ptr)) |
2230 | return handle_decl (decl: ptr, addr, pref); |
2231 | |
2232 | switch (TREE_CODE (ptr)) |
2233 | { |
2234 | case ADDR_EXPR: |
2235 | { |
2236 | tree ref = TREE_OPERAND (ptr, 0); |
2237 | if (!compute_objsize_r (ptr: ref, stmt, addr: true, ostype, pref, snlim, qry)) |
2238 | return false; |
2239 | |
2240 | --pref->deref; |
2241 | return true; |
2242 | } |
2243 | |
2244 | case BIT_FIELD_REF: |
2245 | { |
2246 | tree ref = TREE_OPERAND (ptr, 0); |
2247 | if (!compute_objsize_r (ptr: ref, stmt, addr, ostype, pref, snlim, qry)) |
2248 | return false; |
2249 | |
2250 | offset_int off = wi::to_offset (t: pref->eval (TREE_OPERAND (ptr, 2))); |
2251 | pref->add_offset (off: off / BITS_PER_UNIT); |
2252 | return true; |
2253 | } |
2254 | |
2255 | case ARRAY_REF: |
2256 | return handle_array_ref (aref: ptr, stmt, addr, ostype, pref, snlim, qry); |
2257 | |
2258 | case COMPONENT_REF: |
2259 | return handle_component_ref (cref: ptr, stmt, addr, ostype, pref, snlim, qry); |
2260 | |
2261 | case MEM_REF: |
2262 | return handle_mem_ref (mref: ptr, stmt, ostype, pref, snlim, qry); |
2263 | |
2264 | case TARGET_MEM_REF: |
2265 | { |
2266 | tree ref = TREE_OPERAND (ptr, 0); |
2267 | if (!compute_objsize_r (ptr: ref, stmt, addr, ostype, pref, snlim, qry)) |
2268 | return false; |
2269 | |
2270 | /* TODO: Handle remaining operands. Until then, add maximum offset. */ |
2271 | pref->ref = ptr; |
2272 | pref->add_max_offset (); |
2273 | return true; |
2274 | } |
2275 | |
2276 | case INTEGER_CST: |
2277 | /* Pointer constants other than null smaller than param_min_pagesize |
2278 | might be the result of erroneous null pointer addition/subtraction. |
2279 | Unless zero is a valid address set size to zero. For null pointers, |
2280 | set size to the maximum for now since those may be the result of |
2281 | jump threading. Similarly, for values >= param_min_pagesize in |
2282 | order to support (type *) 0x7cdeab00. */ |
2283 | if (integer_zerop (ptr) |
2284 | || wi::to_widest (t: ptr) >= param_min_pagesize) |
2285 | pref->set_max_size_range (); |
2286 | else if (POINTER_TYPE_P (TREE_TYPE (ptr))) |
2287 | { |
2288 | tree deref_type = TREE_TYPE (TREE_TYPE (ptr)); |
2289 | addr_space_t as = TYPE_ADDR_SPACE (deref_type); |
2290 | if (targetm.addr_space.zero_address_valid (as)) |
2291 | pref->set_max_size_range (); |
2292 | else |
2293 | { |
2294 | pref->sizrng[0] = pref->sizrng[1] = 0; |
2295 | pref->ref_nullptr_p = true; |
2296 | } |
2297 | } |
2298 | else |
2299 | pref->sizrng[0] = pref->sizrng[1] = 0; |
2300 | |
2301 | pref->ref = ptr; |
2302 | return true; |
2303 | |
2304 | case STRING_CST: |
2305 | pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr); |
2306 | pref->ref = ptr; |
2307 | return true; |
2308 | |
2309 | case POINTER_PLUS_EXPR: |
2310 | { |
2311 | tree ref = TREE_OPERAND (ptr, 0); |
2312 | if (!compute_objsize_r (ptr: ref, stmt, addr, ostype, pref, snlim, qry)) |
2313 | return false; |
2314 | |
2315 | /* The below only makes sense if the offset is being applied to the |
2316 | address of the object. */ |
2317 | if (pref->deref != -1) |
2318 | return false; |
2319 | |
2320 | offset_int orng[2]; |
2321 | tree off = pref->eval (TREE_OPERAND (ptr, 1)); |
2322 | if (get_offset_range (x: off, stmt, r: orng, rvals: qry->rvals)) |
2323 | pref->add_offset (min: orng[0], max: orng[1]); |
2324 | else |
2325 | pref->add_max_offset (); |
2326 | return true; |
2327 | } |
2328 | |
2329 | case VIEW_CONVERT_EXPR: |
2330 | ptr = TREE_OPERAND (ptr, 0); |
2331 | return compute_objsize_r (ptr, stmt, addr, ostype, pref, snlim, qry); |
2332 | |
2333 | case SSA_NAME: |
2334 | return handle_ssa_name (ptr, addr, ostype, pref, snlim, qry); |
2335 | |
2336 | default: |
2337 | break; |
2338 | } |
2339 | |
2340 | /* Assume all other expressions point into an unknown object |
2341 | of the maximum valid size. */ |
2342 | pref->ref = ptr; |
2343 | pref->base0 = false; |
2344 | pref->set_max_size_range (); |
2345 | if (TREE_CODE (ptr) == SSA_NAME) |
2346 | qry->put_ref (ptr, ref: *pref); |
2347 | return true; |
2348 | } |
2349 | |
2350 | /* A "public" wrapper around the above. Clients should use this overload |
2351 | instead. */ |
2352 | |
2353 | tree |
2354 | compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref, |
2355 | pointer_query *ptr_qry) |
2356 | { |
2357 | pointer_query qry; |
2358 | if (ptr_qry) |
2359 | ptr_qry->depth = 0; |
2360 | else |
2361 | ptr_qry = &qry; |
2362 | |
2363 | /* Clear and invalidate in case *PREF is being reused. */ |
2364 | pref->offrng[0] = pref->offrng[1] = 0; |
2365 | pref->sizrng[0] = pref->sizrng[1] = -1; |
2366 | |
2367 | ssa_name_limit_t snlim; |
2368 | if (!compute_objsize_r (ptr, stmt, addr: false, ostype, pref, snlim, qry: ptr_qry)) |
2369 | return NULL_TREE; |
2370 | |
2371 | offset_int maxsize = pref->size_remaining (); |
2372 | if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0) |
2373 | pref->offrng[0] = 0; |
2374 | return wide_int_to_tree (sizetype, cst: maxsize); |
2375 | } |
2376 | |
2377 | /* Transitional wrapper. The function should be removed once callers |
2378 | transition to the pointer_query API. */ |
2379 | |
2380 | tree |
2381 | compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref, |
2382 | range_query *rvals /* = NULL */) |
2383 | { |
2384 | pointer_query qry; |
2385 | qry.rvals = rvals; |
2386 | return compute_objsize (ptr, stmt, ostype, pref, ptr_qry: &qry); |
2387 | } |
2388 | |
2389 | /* Legacy wrapper around the above. The function should be removed |
2390 | once callers transition to one of the two above. */ |
2391 | |
2392 | tree |
2393 | compute_objsize (tree ptr, gimple *stmt, int ostype, tree *pdecl /* = NULL */, |
2394 | tree *poff /* = NULL */, range_query *rvals /* = NULL */) |
2395 | { |
2396 | /* Set the initial offsets to zero and size to negative to indicate |
2397 | none has been computed yet. */ |
2398 | access_ref ref; |
2399 | tree size = compute_objsize (ptr, stmt, ostype, pref: &ref, rvals); |
2400 | if (!size || !ref.base0) |
2401 | return NULL_TREE; |
2402 | |
2403 | if (pdecl) |
2404 | *pdecl = ref.ref; |
2405 | |
2406 | if (poff) |
2407 | *poff = wide_int_to_tree (ptrdiff_type_node, cst: ref.offrng[ref.offrng[0] < 0]); |
2408 | |
2409 | return size; |
2410 | } |
2411 | |
2412 | /* Determine the offset *FLDOFF of the first byte of a struct member |
2413 | of TYPE (possibly recursively) into which the byte offset OFF points, |
2414 | starting after the field START_AFTER if it's non-null. On success, |
2415 | if nonnull, set *FLDOFF to the offset of the first byte, and return |
2416 | the field decl. If nonnull, set *NEXTOFF to the offset of the next |
2417 | field (which reflects any padding between the returned field and |
2418 | the next). Otherwise, if no such member can be found, return null. */ |
2419 | |
2420 | tree |
2421 | field_at_offset (tree type, tree start_after, HOST_WIDE_INT off, |
2422 | HOST_WIDE_INT *fldoff /* = nullptr */, |
2423 | HOST_WIDE_INT *nextoff /* = nullptr */) |
2424 | { |
2425 | tree first_fld = TYPE_FIELDS (type); |
2426 | |
2427 | HOST_WIDE_INT offbuf = 0, nextbuf = 0; |
2428 | if (!fldoff) |
2429 | fldoff = &offbuf; |
2430 | if (!nextoff) |
2431 | nextoff = &nextbuf; |
2432 | |
2433 | *nextoff = 0; |
2434 | |
2435 | /* The field to return. */ |
2436 | tree last_fld = NULL_TREE; |
2437 | /* The next field to advance to. */ |
2438 | tree next_fld = NULL_TREE; |
2439 | |
2440 | /* NEXT_FLD's cached offset. */ |
2441 | HOST_WIDE_INT next_pos = -1; |
2442 | |
2443 | for (tree fld = first_fld; fld; fld = next_fld) |
2444 | { |
2445 | next_fld = fld; |
2446 | do |
2447 | /* Advance to the next relevant data member. */ |
2448 | next_fld = TREE_CHAIN (next_fld); |
2449 | while (next_fld |
2450 | && (TREE_CODE (next_fld) != FIELD_DECL |
2451 | || DECL_ARTIFICIAL (next_fld))); |
2452 | |
2453 | if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld)) |
2454 | continue; |
2455 | |
2456 | if (fld == start_after) |
2457 | continue; |
2458 | |
2459 | tree fldtype = TREE_TYPE (fld); |
2460 | /* The offset of FLD within its immediately enclosing structure. */ |
2461 | HOST_WIDE_INT fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos; |
2462 | |
2463 | tree typesize = TYPE_SIZE_UNIT (fldtype); |
2464 | if (typesize && TREE_CODE (typesize) != INTEGER_CST) |
2465 | /* Bail if FLD is a variable length member. */ |
2466 | return NULL_TREE; |
2467 | |
2468 | /* If the size is not available the field is a flexible array |
2469 | member. Treat this case as success. */ |
2470 | HOST_WIDE_INT fldsize = (tree_fits_uhwi_p (typesize) |
2471 | ? tree_to_uhwi (typesize) |
2472 | : off); |
2473 | |
2474 | /* If OFF is beyond the end of the current field continue. */ |
2475 | HOST_WIDE_INT fldend = fldpos + fldsize; |
2476 | if (fldend < off) |
2477 | continue; |
2478 | |
2479 | if (next_fld) |
2480 | { |
2481 | /* If OFF is equal to the offset of the next field continue |
2482 | to it and skip the array/struct business below. */ |
2483 | tree pos = byte_position (next_fld); |
2484 | if (!tree_fits_shwi_p (pos)) |
2485 | /* Bail if NEXT_FLD is a variable length member. */ |
2486 | return NULL_TREE; |
2487 | next_pos = tree_to_shwi (pos); |
2488 | *nextoff = *fldoff + next_pos; |
2489 | if (*nextoff == off && TREE_CODE (type) != UNION_TYPE) |
2490 | continue; |
2491 | } |
2492 | else |
2493 | *nextoff = HOST_WIDE_INT_MAX; |
2494 | |
2495 | /* OFF refers somewhere into the current field or just past its end, |
2496 | which could mean it refers to the next field. */ |
2497 | if (TREE_CODE (fldtype) == ARRAY_TYPE) |
2498 | { |
2499 | /* Will be set to the offset of the first byte of the array |
2500 | element (which may be an array) of FLDTYPE into which |
2501 | OFF - FLDPOS points (which may be past ELTOFF). */ |
2502 | HOST_WIDE_INT eltoff = 0; |
2503 | if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff)) |
2504 | fldtype = ft; |
2505 | else |
2506 | continue; |
2507 | |
2508 | /* Advance the position to include the array element above. |
If OFF - FLDPOS refers to a member of FLDTYPE, the member
2510 | will be determined below. */ |
2511 | fldpos += eltoff; |
2512 | } |
2513 | |
2514 | *fldoff += fldpos; |
2515 | |
2516 | if (TREE_CODE (fldtype) == RECORD_TYPE) |
2517 | /* Drill down into the current field if it's a struct. */ |
2518 | fld = field_at_offset (type: fldtype, start_after, off: off - fldpos, |
2519 | fldoff, nextoff); |
2520 | |
2521 | last_fld = fld; |
2522 | |
2523 | /* Unless the offset is just past the end of the field return it. |
Otherwise save it and return it only if the offset of the next
field is greater (i.e., there is padding between the two)
2526 | or if there is no next field. */ |
2527 | if (off < fldend) |
2528 | break; |
2529 | } |
2530 | |
2531 | if (*nextoff == HOST_WIDE_INT_MAX && next_fld) |
2532 | *nextoff = next_pos; |
2533 | |
2534 | return last_fld; |
2535 | } |
2536 | |
2537 | /* Determine the offset *ELTOFF of the first byte of the array element |
2538 | of array ARTYPE into which the byte offset OFF points. On success |
set *ELTOFF to the offset of the first byte and return the element type.
2540 | Otherwise, if no such element can be found, return null. */ |
2541 | |
2542 | tree |
2543 | array_elt_at_offset (tree artype, HOST_WIDE_INT off, |
2544 | HOST_WIDE_INT *eltoff /* = nullptr */, |
2545 | HOST_WIDE_INT *subar_size /* = nullptr */) |
2546 | { |
2547 | gcc_assert (TREE_CODE (artype) == ARRAY_TYPE); |
2548 | |
2549 | HOST_WIDE_INT dummy; |
2550 | if (!eltoff) |
2551 | eltoff = &dummy; |
2552 | if (!subar_size) |
2553 | subar_size = &dummy; |
2554 | |
2555 | tree eltype = artype; |
2556 | while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE) |
2557 | eltype = TREE_TYPE (eltype); |
2558 | |
2559 | tree subartype = eltype; |
2560 | if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype)) |
2561 | || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node)) |
2562 | eltype = TREE_TYPE (eltype); |
2563 | |
2564 | *subar_size = int_size_in_bytes (subartype); |
2565 | |
2566 | if (eltype == artype) |
2567 | { |
2568 | *eltoff = 0; |
2569 | return artype; |
2570 | } |
2571 | |
2572 | HOST_WIDE_INT artype_size = int_size_in_bytes (artype); |
2573 | HOST_WIDE_INT eltype_size = int_size_in_bytes (eltype); |
2574 | |
if (off < artype_size)
2576 | { |
2577 | *eltoff = (off / eltype_size) * eltype_size; |
2578 | return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype; |
2579 | } |
2580 | |
2581 | return NULL_TREE; |
2582 | } |
2583 | |
2584 | /* Wrapper around build_array_type_nelts that makes sure the array |
2585 | can be created at all and handles zero sized arrays specially. */ |
2586 | |
2587 | tree |
2588 | build_printable_array_type (tree eltype, unsigned HOST_WIDE_INT nelts) |
2589 | { |
2590 | if (TYPE_SIZE_UNIT (eltype) |
2591 | && TREE_CODE (TYPE_SIZE_UNIT (eltype)) == INTEGER_CST |
2592 | && !integer_zerop (TYPE_SIZE_UNIT (eltype)) |
2593 | && TYPE_ALIGN_UNIT (eltype) > 1 |
2594 | && wi::zext (x: wi::to_wide (TYPE_SIZE_UNIT (eltype)), |
2595 | offset: ffs_hwi (TYPE_ALIGN_UNIT (eltype)) - 1) != 0) |
2596 | eltype = TYPE_MAIN_VARIANT (eltype); |
2597 | |
2598 | /* Consider excessive NELTS an array of unknown bound. */ |
2599 | tree idxtype = NULL_TREE; |
2600 | if (nelts < HOST_WIDE_INT_MAX) |
2601 | { |
2602 | if (nelts) |
2603 | return build_array_type_nelts (eltype, nelts); |
2604 | idxtype = build_range_type (sizetype, size_zero_node, NULL_TREE); |
2605 | } |
2606 | |
2607 | tree arrtype = build_array_type (eltype, idxtype); |
2608 | arrtype = build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype)); |
2609 | TYPE_SIZE (arrtype) = bitsize_zero_node; |
2610 | TYPE_SIZE_UNIT (arrtype) = size_zero_node; |
2611 | return arrtype; |
2612 | } |
2613 | |