/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"
#include "predict.h"
#include "gimple-predict.h"
#include "gimple-fold.h"
#include "cgraph.h"
#include "tree-ssa.h"
#include "value-range.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "gimple-walk.h"
#include "attribs.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.cc:lower_eh_constructs).
   3- Multiple identical return statements are grouped into a single
      return statement, and the duplicates are replaced with gotos to
      the unique return site.  */
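
/* For instance (an illustrative sketch, not actual dump output), a
   body with two identical returns such as

     if (x) return 0;
     ...
     return 0;

   is lowered so that a single representative return remains at the
   end of the function and every duplicate becomes a jump to its label:

     if (x) goto <D.L>;
     ...
     goto <D.L>;
     <D.L>:
     return 0;  */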

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
static void lower_builtin_assume_aligned (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter need not be exact, but before lowering it will
	 most certainly be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return lower_function_body ();
  }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */
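
/* E.g. (an illustrative sketch): for a GIMPLE_OMP_PARALLEL directive,
   the nested body sequence is lowered, spliced right after the
   directive statement, and detached from it, so after this pass the
   directive no longer owns a nested statement sequence.  */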

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}

/* Create an artificial FUNCTION_DECL for assumption at LOC.  */

static tree
create_assumption_fn (location_t loc)
{
  tree name = clone_function_name_numbered (current_function_decl, "_assume");
  /* Temporarily, until we determine all the arguments.  */
  tree type = build_varargs_function_type_list (boolean_type_node, NULL_TREE);
  tree decl = build_decl (loc, FUNCTION_DECL, name, type);
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NAMELESS (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  tree attributes = DECL_ATTRIBUTES (current_function_decl);
  if (lookup_attribute ("noipa", attributes) == NULL)
    {
      attributes = tree_cons (get_identifier ("noipa"), NULL, attributes);
      if (lookup_attribute ("noinline", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noinline"), NULL, attributes);
      if (lookup_attribute ("noclone", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noclone"), NULL, attributes);
      if (lookup_attribute ("no_icf", attributes) == NULL)
	attributes = tree_cons (get_identifier ("no_icf"), NULL, attributes);
    }
  DECL_ATTRIBUTES (decl) = attributes;
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  tree t = build_decl (DECL_SOURCE_LOCATION (decl),
		       RESULT_DECL, NULL_TREE, boolean_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;
  push_struct_function (decl);
  cfun->function_end_locus = loc;
  init_tree_ssa (cfun);
  return decl;
}

struct lower_assumption_data
{
  copy_body_data id;
  tree return_false_label;
  tree guard_copy;
  auto_vec<tree> decls;
};

/* Helper function for lower_assumption.  Find local vars and labels
   in the assumption sequence and remove debug stmts.  */

static tree
find_assumption_locals_r (gimple_stmt_iterator *gsi_p, bool *,
			  struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      gcc_assert (SSA_NAME_VAR (lhs) == NULL_TREE);
      data->id.decl_map->put (lhs, NULL_TREE);
      data->decls.safe_push (lhs);
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var; var = DECL_CHAIN (var))
	if (VAR_P (var)
	    && !DECL_EXTERNAL (var)
	    && DECL_CONTEXT (var) == data->id.src_fn)
	  {
	    data->id.decl_map->put (var, var);
	    data->decls.safe_push (var);
	  }
      break;
    case GIMPLE_LABEL:
      {
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	data->id.decl_map->put (label, label);
	break;
      }
    case GIMPLE_RETURN:
      /* If something in the assumption tries to return from the parent
	 function, reaching it during the hypothetical evaluation would
	 be UB, so transform such returns into return false;  */
      {
	gimple *g = gimple_build_assign (data->guard_copy, boolean_false_node);
	gsi_insert_before (gsi_p, g, GSI_SAME_STMT);
	gimple_return_set_retval (as_a <greturn *> (stmt), data->guard_copy);
	break;
      }
    case GIMPLE_DEBUG:
      /* As assumptions won't be emitted, debug info stmts in them
	 are useless.  */
      gsi_remove (gsi_p, true);
      wi->removed_stmt = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Create a new PARM_DECL that is identical in all respects to DECL,
   except that DECL itself can be a VAR_DECL, a PARM_DECL or a
   RESULT_DECL.  The original DECL must come from ID->src_fn and the
   copy will be part of ID->dst_fn.  */

static tree
assumption_copy_decl (tree decl, copy_body_data *id)
{
  tree type = TREE_TYPE (decl);

  if (is_global_var (decl))
    return decl;

  gcc_assert (VAR_P (decl)
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);
  if (TREE_THIS_VOLATILE (decl))
    type = build_pointer_type (type);
  tree copy = build_decl (DECL_SOURCE_LOCATION (decl),
			  PARM_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_THIS_VOLATILE (copy) = 0;
  if (TREE_THIS_VOLATILE (decl))
    TREE_READONLY (copy) = 1;
  else
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
      DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
    }
  DECL_ARG_TYPE (copy) = type;
  ((lower_assumption_data *) id)->decls.safe_push (decl);
  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Transform gotos out of the assumption into return false.  */

static tree
adjust_assumption_stmt_r (gimple_stmt_iterator *gsi_p, bool *,
			  struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lab = NULL_TREE;
  unsigned int idx = 0;
  if (gimple_code (stmt) == GIMPLE_GOTO)
    lab = gimple_goto_dest (stmt);
  else if (gimple_code (stmt) == GIMPLE_COND)
    {
    repeat:
      if (idx == 0)
	lab = gimple_cond_true_label (as_a <gcond *> (stmt));
      else
	lab = gimple_cond_false_label (as_a <gcond *> (stmt));
    }
  else if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree label = gimple_label_label (as_a <glabel *> (stmt));
      DECL_CONTEXT (label) = current_function_decl;
    }
  if (lab)
    {
      if (!data->id.decl_map->get (lab))
	{
	  if (!data->return_false_label)
	    data->return_false_label
	      = create_artificial_label (UNKNOWN_LOCATION);
	  if (gimple_code (stmt) == GIMPLE_GOTO)
	    gimple_goto_set_dest (as_a <ggoto *> (stmt),
				  data->return_false_label);
	  else if (idx == 0)
	    gimple_cond_set_true_label (as_a <gcond *> (stmt),
					data->return_false_label);
	  else
	    gimple_cond_set_false_label (as_a <gcond *> (stmt),
					 data->return_false_label);
	}
      if (gimple_code (stmt) == GIMPLE_COND && idx == 0)
	{
	  idx = 1;
	  goto repeat;
	}
    }
  return NULL_TREE;
}

/* Adjust trees in the assumption body.  Called through walk_tree.  */

static tree
adjust_assumption_stmt_op (tree *tp, int *, void *datap)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) datap;
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  tree t = *tp;
  tree *newt;
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      newt = data->id.decl_map->get (t);
      /* There shouldn't be SSA_NAMEs other than ones defined in the
	 assumption's body.  */
      gcc_assert (newt);
      *tp = *newt;
      break;
    case LABEL_DECL:
      newt = data->id.decl_map->get (t);
      if (newt)
	*tp = *newt;
      break;
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *tp = remap_decl (t, &data->id);
      if (TREE_THIS_VOLATILE (t) && *tp != t)
	{
	  *tp = build_simple_mem_ref (*tp);
	  TREE_THIS_NOTRAP (*tp) = 1;
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Lower assumption.
   The gimplifier transformed:
   .ASSUME (cond);
   into:
   [[assume (guard)]]
   {
     guard = cond;
   }
   which we should transform into:
   .ASSUME (&artificial_fn, args...);
   where artificial_fn will look like:
   bool artificial_fn (args...)
   {
     guard = cond;
     return guard;
   }
   with any debug stmts in the block removed and jumps out of
   the block or return stmts replaced with return false;  */

static void
lower_assumption (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree guard = gimple_assume_guard (stmt);
  gimple *bind = gimple_assume_body (stmt);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_code (bind) == GIMPLE_BIND);

  lower_assumption_data lad;
  hash_map<tree, tree> decl_map;
  memset (&lad.id, 0, sizeof (lad.id));
  lad.return_false_label = NULL_TREE;
  lad.id.src_fn = current_function_decl;
  lad.id.dst_fn = create_assumption_fn (loc);
  lad.id.src_cfun = DECL_STRUCT_FUNCTION (lad.id.src_fn);
  lad.id.decl_map = &decl_map;
  lad.id.copy_decl = assumption_copy_decl;
  lad.id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  lad.id.transform_parameter = true;
  lad.id.do_not_unshare = true;
  lad.id.do_not_fold = true;
  cfun->curr_properties = lad.id.src_cfun->curr_properties;
  lad.guard_copy = create_tmp_var (boolean_type_node);
  decl_map.put (lad.guard_copy, lad.guard_copy);
  decl_map.put (guard, lad.guard_copy);
  cfun->assume_function = 1;

  /* Find variables, labels and SSA_NAMEs local to the assume GIMPLE_BIND.  */
  gimple_stmt_iterator gsi2 = gsi_start (*gimple_assume_body_ptr (stmt));
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, find_assumption_locals_r, NULL, &wi);
  unsigned int sz = lad.decls.length ();
  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      tree newv;
      /* SSA_NAMEs defined in the assume condition should be replaced
	 by new SSA_NAMEs in the artificial function.  */
      if (TREE_CODE (v) == SSA_NAME)
	{
	  newv = make_ssa_name (remap_type (TREE_TYPE (v), &lad.id));
	  decl_map.put (v, newv);
	}
      /* Local vars should have context and type adjusted to the
	 new artificial function.  */
      else if (VAR_P (v))
	{
	  if (is_global_var (v) && !DECL_ASSEMBLER_NAME_SET_P (v))
	    DECL_ASSEMBLER_NAME (v);
	  TREE_TYPE (v) = remap_type (TREE_TYPE (v), &lad.id);
	  DECL_CONTEXT (v) = current_function_decl;
	}
    }
  /* References to other automatic vars should be replaced by
     PARM_DECLs to the artificial function.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, adjust_assumption_stmt_r,
		    adjust_assumption_stmt_op, &wi);

  /* At the start prepend guard = false;  */
  gimple_seq body = NULL;
  gimple *g = gimple_build_assign (lad.guard_copy, boolean_false_node);
  gimple_seq_add_stmt (&body, g);
  gimple_seq_add_stmt (&body, bind);
  /* At the end add return guard;  */
  greturn *gr = gimple_build_return (lad.guard_copy);
  gimple_seq_add_stmt (&body, gr);
  /* If there were any jumps to labels outside of the condition,
     replace them with a jump to
     return_false_label:
     guard = false;
     return guard;  */
  if (lad.return_false_label)
    {
      g = gimple_build_label (lad.return_false_label);
      gimple_seq_add_stmt (&body, g);
      g = gimple_build_assign (lad.guard_copy, boolean_false_node);
      gimple_seq_add_stmt (&body, g);
      gr = gimple_build_return (lad.guard_copy);
      gimple_seq_add_stmt (&body, gr);
    }
  bind = gimple_build_bind (NULL_TREE, body, NULL_TREE);
  body = NULL;
  gimple_seq_add_stmt (&body, bind);
  gimple_set_body (current_function_decl, body);
  pop_cfun ();

  tree parms = NULL_TREE;
  tree parmt = void_list_node;
  auto_vec<tree, 8> vargs;
  vargs.safe_grow (1 + (lad.decls.length () - sz), true);
  /* First argument to IFN_ASSUME will be address of the
     artificial function.  */
  vargs[0] = build_fold_addr_expr (lad.id.dst_fn);
  for (unsigned i = lad.decls.length (); i > sz; --i)
    {
      tree *v = decl_map.get (lad.decls[i - 1]);
      gcc_assert (v && TREE_CODE (*v) == PARM_DECL);
      DECL_CHAIN (*v) = parms;
      parms = *v;
      parmt = tree_cons (NULL_TREE, TREE_TYPE (*v), parmt);
      /* Remaining arguments will be the variables/parameters
	 mentioned in the condition.  */
      vargs[i - sz] = lad.decls[i - 1];
      if (TREE_THIS_VOLATILE (lad.decls[i - 1]))
	{
	  TREE_ADDRESSABLE (lad.decls[i - 1]) = 1;
	  vargs[i - sz] = build_fold_addr_expr (lad.decls[i - 1]);
	}
      /* If they have gimple types, we might need to regimplify
	 them to make the IFN_ASSUME call valid.  */
      if (is_gimple_reg_type (TREE_TYPE (vargs[i - sz]))
	  && !is_gimple_val (vargs[i - sz]))
	{
	  tree t = make_ssa_name (TREE_TYPE (vargs[i - sz]));
	  g = gimple_build_assign (t, vargs[i - sz]);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  vargs[i - sz] = t;
	}
    }
  DECL_ARGUMENTS (lad.id.dst_fn) = parms;
  TREE_TYPE (lad.id.dst_fn) = build_function_type (boolean_type_node, parmt);

  cgraph_node::add_new_function (lad.id.dst_fn, false);

  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      if (TREE_CODE (v) == SSA_NAME)
	release_ssa_name (v);
    }

  data->cannot_fallthru = false;
  /* Replace GIMPLE_ASSUME statement with IFN_ASSUME call.  */
  gcall *call = gimple_build_call_internal_vec (IFN_ASSUME, vargs);
  gimple_set_location (call, loc);
  gsi_replace (gsi, call, true);
}

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    case GIMPLE_OMP_STRUCTURED_BLOCK:
      /* These are supposed to be removed already in OMP lowering.  */
      gcc_unreachable ();

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_ASSUME_ALIGNED
		     && !optimize)
	      {
		lower_builtin_assume_aligned (gsi);
		data->cannot_fallthru = false;
		gsi_next (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }

	if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	  {
	    tree base = gimple_call_arg (stmt, 1);
	    gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
	    tree decl = TREE_OPERAND (base, 0);
	    if (VAR_P (decl) && TREE_STATIC (decl))
	      {
		/* Don't poison a variable with static storage; it might have
		   gotten marked before gimplify_init_constructor promoted it
		   to static.  */
		gsi_remove (gsi, true);
		return;
	      }
	  }

	/* We delay folding of builtin calls from gimplification to
	   here so the IL is in a consistent state for the diagnostic
	   machinery's job.  */
	if (gimple_call_builtin_p (stmt))
	  fold_stmt (gsi);
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_ASSUME:
      lower_assumption (gsi, data);
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */
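
/* E.g. (an illustrative sketch): for a bind such as

     {
       int tmp;
       tmp = a + b;
       use (tmp);
     }

   TMP is recorded into the function's local decls via record_vars, the
   bind's body is spliced into the enclosing sequence, and the
   GIMPLE_BIND itself is deleted.  */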

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}


/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */
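
/* E.g. (an illustrative source-level sketch; the analysis itself runs
   on GIMPLE): in

     try { throw_something (); } catch (...) { x = 1; }

   the catch body falls through, so the whole construct may fall
   through even though the try body always transfers control away.  */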

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* location includes block.  */
  gimple_set_location (t, gimple_location (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  /* location includes block.  */
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}

/* Lower calls to __builtin_assume_aligned when not optimizing.  */
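
/* E.g. (a sketch): given

     p = __builtin_assume_aligned (q, 16);

   at -O0, provided P is an SSA pointer and the alignment is a constant
   power of two, we only record the 16-byte alignment in P's ptr_info;
   the call itself is left in place and elided at RTL expansion.  */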

static void
lower_builtin_assume_aligned (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));

  tree lhs = gimple_call_lhs (call);
  if (!lhs || !POINTER_TYPE_P (TREE_TYPE (lhs)) || TREE_CODE (lhs) != SSA_NAME)
    return;

  tree align = gimple_call_arg (call, 1);
  tree misalign = (gimple_call_num_args (call) > 2
		   ? gimple_call_arg (call, 2) : NULL_TREE);
  if (!tree_fits_uhwi_p (align)
      || (misalign && !tree_fits_uhwi_p (misalign)))
    return;

  unsigned aligni = TREE_INT_CST_LOW (align);
  unsigned misaligni = misalign ? TREE_INT_CST_LOW (misalign) : 0;
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0
      || (misaligni & ~(aligni - 1)) != 0)
    return;

  /* For lowering we simply transfer alignment information to the
     result and leave the call otherwise unchanged, it will be elided
     at RTL expansion time.  */
  ptr_info_def *pi = get_ptr_info (lhs);
  set_ptr_info_alignment (pi, aligni, misaligni);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 we don't need to care about.  */
      if (!VAR_P (var))
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}
