1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <[email protected]>,
5 Diego Novillo <[email protected]> and Jason Merrill <[email protected]>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 02110-1301, USA. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "tree-gimple.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "timevar.h"
39 #include "except.h"
40 #include "hashtab.h"
41 #include "flags.h"
42 #include "real.h"
43 #include "function.h"
44 #include "output.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "target.h"
49 #include "optabs.h"
50 #include "pointer-set.h"
51
52
53 enum gimplify_omp_var_data
54 {
55 GOVD_SEEN = 1,
56 GOVD_EXPLICIT = 2,
57 GOVD_SHARED = 4,
58 GOVD_PRIVATE = 8,
59 GOVD_FIRSTPRIVATE = 16,
60 GOVD_LASTPRIVATE = 32,
61 GOVD_REDUCTION = 64,
62 GOVD_LOCAL = 128,
63 GOVD_DEBUG_PRIVATE = 256,
64 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
65 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
66 };
67
68 struct gimplify_omp_ctx
69 {
70 struct gimplify_omp_ctx *outer_context;
71 splay_tree variables;
72 struct pointer_set_t *privatized_types;
73 location_t location;
74 enum omp_clause_default_kind default_kind;
75 bool is_parallel;
76 bool is_combined_parallel;
77 };
78
79 struct gimplify_ctx
80 {
81 struct gimplify_ctx *prev_context;
82
83 tree current_bind_expr;
84 tree temps;
85 tree conditional_cleanups;
86 tree exit_label;
87 tree return_temp;
88
89 VEC(tree,heap) *case_labels;
90 /* The formal temporary table. Should this be persistent? */
91 htab_t temp_htab;
92
93 int conditions;
94 bool save_stack;
95 bool into_ssa;
96 };
97
98 static struct gimplify_ctx *gimplify_ctxp;
99 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
100
101
102
103 /* Formal (expression) temporary table handling: Multiple occurrences of
104 the same scalar expression are evaluated into the same temporary. */
105
106 typedef struct gimple_temp_hash_elt
107 {
108 tree val; /* Key */
109 tree temp; /* Value */
110 } elt_t;
111
112 /* Forward declarations. */
113 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
114 #ifdef ENABLE_CHECKING
115 static bool cpt_same_type (tree a, tree b);
116 #endif
117
118
119 /* Return a hash value for a formal temporary table entry. */
120
121 static hashval_t
122 gimple_tree_hash (const void *p)
123 {
124 tree t = ((const elt_t *) p)->val;
125 return iterative_hash_expr (t, 0);
126 }
127
128 /* Compare two formal temporary table entries. */
129
130 static int
131 gimple_tree_eq (const void *p1, const void *p2)
132 {
133 tree t1 = ((const elt_t *) p1)->val;
134 tree t2 = ((const elt_t *) p2)->val;
135 enum tree_code code = TREE_CODE (t1);
136
137 if (TREE_CODE (t2) != code
138 || TREE_TYPE (t1) != TREE_TYPE (t2))
139 return 0;
140
141 if (!operand_equal_p (t1, t2, 0))
142 return 0;
143
144 /* Only allow them to compare equal if they also hash equal; otherwise
145 results are nondeterministic, and we fail bootstrap comparison. */
146 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
147
148 return 1;
149 }
150
151 /* Set up a context for the gimplifier. */
152
153 void
154 push_gimplify_context (void)
155 {
156 struct gimplify_ctx *c;
157
158 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
159 c->prev_context = gimplify_ctxp;
160 if (optimize)
161 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
162
163 gimplify_ctxp = c;
164 }
165
166 /* Tear down a context for the gimplifier. If BODY is non-null, then
167 put the temporaries into the outer BIND_EXPR. Otherwise, put them
168 in the unexpanded_var_list. */
169
170 void
171 pop_gimplify_context (tree body)
172 {
173 struct gimplify_ctx *c = gimplify_ctxp;
174 tree t;
175
176 gcc_assert (c && !c->current_bind_expr);
177 gimplify_ctxp = c->prev_context;
178
179 for (t = c->temps; t ; t = TREE_CHAIN (t))
180 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
181
182 if (body)
183 declare_vars (c->temps, body, false);
184 else
185 record_vars (c->temps);
186
187 if (optimize)
188 htab_delete (c->temp_htab);
189 free (c);
190 }
191
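/* Push the BIND_EXPR BIND onto the stack of bind expressions
tracked by the current gimplification context. */
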
192 static void
193 gimple_push_bind_expr (tree bind)
194 {
195 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
196 gimplify_ctxp->current_bind_expr = bind;
197 }
198
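/* Pop the innermost BIND_EXPR off the stack tracked by the current
gimplification context. */
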
199 static void
200 gimple_pop_bind_expr (void)
201 {
202 gimplify_ctxp->current_bind_expr
203 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
204 }
205
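/* Return the innermost BIND_EXPR currently being gimplified. */
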
206 tree
207 gimple_current_bind_expr (void)
208 {
209 return gimplify_ctxp->current_bind_expr;
210 }
211
212 /* Returns true iff there is a COND_EXPR between us and the innermost
213 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
214
215 static bool
216 gimple_conditional_context (void)
217 {
218 return gimplify_ctxp->conditions > 0;
219 }
220
221 /* Note that we've entered a COND_EXPR. */
222
223 static void
224 gimple_push_condition (void)
225 {
226 #ifdef ENABLE_CHECKING
227 if (gimplify_ctxp->conditions == 0)
228 gcc_assert (!gimplify_ctxp->conditional_cleanups);
229 #endif
230 ++(gimplify_ctxp->conditions);
231 }
232
233 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
234 now, add any conditional cleanups we've seen to the prequeue. */
235
236 static void
237 gimple_pop_condition (tree *pre_p)
238 {
239 int conds = --(gimplify_ctxp->conditions);
240
241 gcc_assert (conds >= 0);
242 if (conds == 0)
243 {
244 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
245 gimplify_ctxp->conditional_cleanups = NULL_TREE;
246 }
247 }
248
249 /* A stable comparison routine for use with splay trees and DECLs. */
250
251 static int
252 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
253 {
254 tree a = (tree) xa;
255 tree b = (tree) xb;
256
257 return DECL_UID (a) - DECL_UID (b);
258 }
259
260 /* Create a new omp construct that deals with variable remapping. */
261
262 static struct gimplify_omp_ctx *
263 new_omp_context (bool is_parallel, bool is_combined_parallel)
264 {
265 struct gimplify_omp_ctx *c;
266
267 c = XCNEW (struct gimplify_omp_ctx);
268 c->outer_context = gimplify_omp_ctxp;
269 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
270 c->privatized_types = pointer_set_create ();
271 c->location = input_location;
272 c->is_parallel = is_parallel;
273 c->is_combined_parallel = is_combined_parallel;
274 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
275
276 return c;
277 }
278
279 /* Destroy an omp construct that deals with variable remapping. */
280
281 static void
282 delete_omp_context (struct gimplify_omp_ctx *c)
283 {
284 splay_tree_delete (c->variables);
285 pointer_set_destroy (c->privatized_types);
286 XDELETE (c);
287 }
288
289 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
290 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
291
292 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
293
294 static void
295 append_to_statement_list_1 (tree t, tree *list_p)
296 {
297 tree list = *list_p;
298 tree_stmt_iterator i;
299
300 if (!list)
301 {
302 if (t && TREE_CODE (t) == STATEMENT_LIST)
303 {
304 *list_p = t;
305 return;
306 }
307 *list_p = list = alloc_stmt_list ();
308 }
309
310 i = tsi_last (list);
311 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
312 }
313
314 /* Add T to the end of the list container pointed to by LIST_P.
315 If T is an expression with no effects, it is ignored. */
316
317 void
318 append_to_statement_list (tree t, tree *list_p)
319 {
320 if (t && TREE_SIDE_EFFECTS (t))
321 append_to_statement_list_1 (t, list_p);
322 }
323
324 /* Similar, but the statement is always added, regardless of side effects. */
325
326 void
327 append_to_statement_list_force (tree t, tree *list_p)
328 {
329 if (t != NULL_TREE)
330 append_to_statement_list_1 (t, list_p);
331 }
332
333 /* Both gimplify the statement T and append it to LIST_P. */
334
335 void
336 gimplify_and_add (tree t, tree *list_p)
337 {
338 gimplify_stmt (&t);
339 append_to_statement_list (t, list_p);
340 }
341
342 /* Strip off a legitimate source ending from the input string NAME of
343 length LEN. Rather than having to know the names used by all of
344 our front ends, we strip off an ending of a period followed by
345 up to five characters. (Java uses ".class".) */
346
347 static inline void
348 remove_suffix (char *name, int len)
349 {
350 int i;
351
352 for (i = 2; i < 8 && len > i; i++)
353 {
354 if (name[len - i] == '.')
355 {
356 name[len - i] = '\0';
357 break;
358 }
359 }
360 }
361
362 /* Create a nameless artificial label and put it in the current function
363 context. Returns the newly created label. */
364
365 tree
366 create_artificial_label (void)
367 {
368 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
369
370 DECL_ARTIFICIAL (lab) = 1;
371 DECL_IGNORED_P (lab) = 1;
372 DECL_CONTEXT (lab) = current_function_decl;
373 return lab;
374 }
375
376 /* Subroutine for find_single_pointer_decl. */
377
378 static tree
379 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
380 void *data)
381 {
382 tree *pdecl = (tree *) data;
383
384 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
385 {
386 if (*pdecl)
387 {
388 /* We already found a pointer decl; return anything other
389 than NULL_TREE to unwind from walk_tree signalling that
390 we have a duplicate. */
391 return *tp;
392 }
393 *pdecl = *tp;
394 }
395
396 return NULL_TREE;
397 }
398
399 /* Find the single DECL of pointer type in the tree T and return it.
400 If there is no such DECL, or more than one, return NULL. */
401
402 static tree
403 find_single_pointer_decl (tree t)
404 {
405 tree decl = NULL_TREE;
406
407 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
408 {
409 /* find_single_pointer_decl_1 returns a nonzero value, causing
410 walk_tree to return a nonzero value, to indicate that it
411 found more than one pointer DECL. */
412 return NULL_TREE;
413 }
414
415 return decl;
416 }
417
418 /* Create a new temporary name with PREFIX. Returns an identifier. */
419
420 static GTY(()) unsigned int tmp_var_id_num;
421
422 tree
423 create_tmp_var_name (const char *prefix)
424 {
425 char *tmp_name;
426
427 if (prefix)
428 {
429 char *preftmp = ASTRDUP (prefix);
430
431 remove_suffix (preftmp, strlen (preftmp));
432 prefix = preftmp;
433 }
434
435 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
436 return get_identifier (tmp_name);
437 }
438
439
440 /* Create a new temporary variable declaration of type TYPE.
441 Does NOT push it into the current binding. */
442
443 tree
444 create_tmp_var_raw (tree type, const char *prefix)
445 {
446 tree tmp_var;
447 tree new_type;
448
449 /* Make the type of the variable writable. */
450 new_type = build_type_variant (type, 0, 0);
451 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
452
453 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
454 type);
455
456 /* The variable was declared by the compiler. */
457 DECL_ARTIFICIAL (tmp_var) = 1;
458 /* And we don't want debug info for it. */
459 DECL_IGNORED_P (tmp_var) = 1;
460
461 /* Make the variable writable. */
462 TREE_READONLY (tmp_var) = 0;
463
464 DECL_EXTERNAL (tmp_var) = 0;
465 TREE_STATIC (tmp_var) = 0;
466 TREE_USED (tmp_var) = 1;
467
468 return tmp_var;
469 }
470
471 /* Create a new temporary variable declaration of type TYPE. DOES push the
472 variable into the current binding. Further, assume that this is called
473 only from gimplification or optimization, at which point the creation of
474 certain types is a bug. */
475
476 tree
477 create_tmp_var (tree type, const char *prefix)
478 {
479 tree tmp_var;
480
481 /* We don't allow types that are addressable (meaning we can't make copies),
482 or incomplete. We used to reject all variable-sized objects here as well,
483 but now support those for which a constant upper bound can be obtained.
484 The processing for variable sizes is performed in gimple_add_tmp_var,
485 which is the point at which it really matters; that point may also be
486 reached via paths not going through this function, e.g. direct calls to
create_tmp_var_raw. */
487 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
488
489 tmp_var = create_tmp_var_raw (type, prefix);
490 gimple_add_tmp_var (tmp_var);
491 return tmp_var;
492 }
493
494 /* Given a tree, try to return a useful variable name that we can use
495 to prefix a temporary that is being assigned the value of the tree.
496 I.E. given <temp> = &A, return A. */
497
498 const char *
499 get_name (tree t)
500 {
501 tree stripped_decl;
502
503 stripped_decl = t;
504 STRIP_NOPS (stripped_decl);
505 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
506 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
507 else
508 {
509 switch (TREE_CODE (stripped_decl))
510 {
511 case ADDR_EXPR:
512 return get_name (TREE_OPERAND (stripped_decl, 0));
513 break;
514 default:
515 return NULL;
516 }
517 }
518 }
519
520 /* Create a temporary with a name derived from VAL. Subroutine of
521 lookup_tmp_var; nobody else should call this function. */
522
523 static inline tree
524 create_tmp_from_val (tree val)
525 {
526 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
527 }
528
529 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
530 an existing expression temporary. */
531
532 static tree
533 lookup_tmp_var (tree val, bool is_formal)
534 {
535 tree ret;
536
537 /* If not optimizing, never really reuse a temporary. local-alloc
538 won't allocate any variable that is used in more than one basic
539 block, which means it will go into memory, causing much extra
540 work in reload and final and poorer code generation, outweighing
541 the extra memory allocation here. */
542 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
543 ret = create_tmp_from_val (val);
544 else
545 {
546 elt_t elt, *elt_p;
547 void **slot;
548
549 elt.val = val;
550 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
551 if (*slot == NULL)
552 {
553 elt_p = XNEW (elt_t);
554 elt_p->val = val;
555 elt_p->temp = ret = create_tmp_from_val (val);
556 *slot = (void *) elt_p;
557 }
558 else
559 {
560 elt_p = (elt_t *) *slot;
561 ret = elt_p->temp;
562 }
563 }
564
565 if (is_formal)
566 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
567
568 return ret;
569 }
570
571 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
572 in gimplify_expr. Only use this function if:
573
574 1) The value of the unfactored expression represented by VAL will not
575 change between the initialization and use of the temporary, and
576 2) The temporary will not be otherwise modified.
577
578 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
579 and #2 means it is inappropriate for && temps.
580
581 For other cases, use get_initialized_tmp_var instead. */
582
583 static tree
584 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
585 {
586 tree t, mod;
587
588 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
589
590 t = lookup_tmp_var (val, is_formal);
591
592 if (is_formal)
593 {
594 tree u = find_single_pointer_decl (val);
595
596 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
597 u = DECL_GET_RESTRICT_BASE (u);
598 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
599 {
600 if (DECL_BASED_ON_RESTRICT_P (t))
601 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
602 else
603 {
604 DECL_BASED_ON_RESTRICT_P (t) = 1;
605 SET_DECL_RESTRICT_BASE (t, u);
606 }
607 }
608 }
609
610 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
611 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
612
613 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, val);
614
615 if (EXPR_HAS_LOCATION (val))
616 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
617 else
618 SET_EXPR_LOCATION (mod, input_location);
619
620 /* gimplify_modify_expr might want to reduce this further. */
621 gimplify_and_add (mod, pre_p);
622
623 /* If we're gimplifying into ssa, gimplify_modify_expr will have
624 given our temporary an ssa name. Find and return it. */
625 if (gimplify_ctxp->into_ssa)
626 t = TREE_OPERAND (mod, 0);
627
628 return t;
629 }
630
631 /* Returns a formal temporary variable initialized with VAL. PRE_P
632 points to a statement list where side-effects needed to compute VAL
633 should be stored. */
634
635 tree
636 get_formal_tmp_var (tree val, tree *pre_p)
637 {
638 return internal_get_tmp_var (val, pre_p, NULL, true);
639 }
640
641 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
642 are as in gimplify_expr. */
643
644 tree
645 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
646 {
647 return internal_get_tmp_var (val, pre_p, post_p, false);
648 }
649
650 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
651 true, generate debug info for them; otherwise don't. */
652
653 void
654 declare_vars (tree vars, tree scope, bool debug_info)
655 {
656 tree last = vars;
657 if (last)
658 {
659 tree temps, block;
660
661 /* C99 mode puts the default 'return 0;' for main outside the outer
662 braces. So drill down until we find an actual scope. */
663 while (TREE_CODE (scope) == COMPOUND_EXPR)
664 scope = TREE_OPERAND (scope, 0);
665
666 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
667
668 temps = nreverse (last);
669
670 block = BIND_EXPR_BLOCK (scope);
671 if (!block || !debug_info)
672 {
673 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
674 BIND_EXPR_VARS (scope) = temps;
675 }
676 else
677 {
678 /* We need to attach the nodes both to the BIND_EXPR and to its
679 associated BLOCK for debugging purposes. The key point here
680 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
681 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
682 if (BLOCK_VARS (block))
683 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
684 else
685 {
686 BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
687 BLOCK_VARS (block) = temps;
688 }
689 }
690 }
691 }
692
693 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
694 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
695 no such upper bound can be obtained. */
696
697 static void
698 force_constant_size (tree var)
699 {
700 /* The only attempt we make is by querying the maximum size of objects
701 of the variable's type. */
702
703 HOST_WIDE_INT max_size;
704
705 gcc_assert (TREE_CODE (var) == VAR_DECL);
706
707 max_size = max_int_size_in_bytes (TREE_TYPE (var));
708
709 gcc_assert (max_size >= 0);
710
711 DECL_SIZE_UNIT (var)
712 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
713 DECL_SIZE (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
715 }
716
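/* Declare the temporary variable TMP in the current function. If a
gimplification context is active, add TMP to its list of temporaries
(and mark it local within the nearest enclosing parallel); otherwise
record it with the function directly. */
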
717 void
718 gimple_add_tmp_var (tree tmp)
719 {
720 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
721
722 /* Later processing assumes that the object size is constant, which might
723 not be true at this point. Force the use of a constant upper bound in
724 this case. */
725 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
726 force_constant_size (tmp);
727
728 DECL_CONTEXT (tmp) = current_function_decl;
729 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
730
731 if (gimplify_ctxp)
732 {
733 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
734 gimplify_ctxp->temps = tmp;
735
736 /* Mark temporaries local within the nearest enclosing parallel. */
737 if (gimplify_omp_ctxp)
738 {
739 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
740 while (ctx && !ctx->is_parallel)
741 ctx = ctx->outer_context;
742 if (ctx)
743 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
744 }
745 }
746 else if (cfun)
747 record_vars (tmp);
748 else
749 declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
750 }
751
752 /* Determines whether to assign a locus to the statement STMT. */
753
754 static bool
755 should_carry_locus_p (tree stmt)
756 {
757 /* Don't emit a line note for a label. We particularly don't want to
758 emit one for the break label, since it doesn't actually correspond
759 to the beginning of the loop/switch. */
760 if (TREE_CODE (stmt) == LABEL_EXPR)
761 return false;
762
763 /* Do not annotate empty statements, since it confuses gcov. */
764 if (!TREE_SIDE_EFFECTS (stmt))
765 return false;
766
767 return true;
768 }
769
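/* Set the source location of statement T to LOCUS, unless it already
has a location or should not carry one. */
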
770 static void
771 annotate_one_with_locus (tree t, location_t locus)
772 {
773 if (EXPR_P (t) && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
774 SET_EXPR_LOCATION (t, locus);
775 }
776
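/* Annotate every statement in the statement list *STMT_P with the
source location LOCUS. */
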
777 void
778 annotate_all_with_locus (tree *stmt_p, location_t locus)
779 {
780 tree_stmt_iterator i;
781
782 if (!*stmt_p)
783 return;
784
785 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
786 {
787 tree t = tsi_stmt (i);
788
789 /* Assuming we've already been gimplified, we shouldn't
790 see nested chaining constructs anymore. */
791 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
792 && TREE_CODE (t) != COMPOUND_EXPR);
793
794 annotate_one_with_locus (t, locus);
795 }
796 }
797
798 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
799 These nodes model computations that should only be done once. If we
800 were to unshare something like SAVE_EXPR(i++), the gimplification
801 process would create wrong code. */
802
803 static tree
804 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
805 {
806 enum tree_code code = TREE_CODE (*tp);
807 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
808 if (TREE_CODE_CLASS (code) == tcc_type
809 || TREE_CODE_CLASS (code) == tcc_declaration
810 || TREE_CODE_CLASS (code) == tcc_constant
811 || code == SAVE_EXPR || code == TARGET_EXPR
812 /* We can't do anything sensible with a BLOCK used as an expression,
813 but we also can't just die when we see it because of non-expression
814 uses. So just avert our eyes and cross our fingers. Silly Java. */
815 || code == BLOCK)
816 *walk_subtrees = 0;
817 else
818 {
819 gcc_assert (code != BIND_EXPR);
820 copy_tree_r (tp, walk_subtrees, data);
821 }
822
823 return NULL_TREE;
824 }
825
826 /* Callback for walk_tree to unshare most of the shared trees rooted at
827 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
828 then *TP is deep copied by calling copy_tree_r.
829
830 This unshares the same trees as copy_tree_r with the exception of
831 SAVE_EXPR nodes. These nodes model computations that should only be
832 done once. If we were to unshare something like SAVE_EXPR(i++), the
833 gimplification process would create wrong code. */
834
835 static tree
836 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
837 void *data ATTRIBUTE_UNUSED)
838 {
839 tree t = *tp;
840 enum tree_code code = TREE_CODE (t);
841
842 /* Skip types, decls, and constants. But we do want to look at their
843 types and the bounds of types. Mark them as visited so we properly
844 unmark their subtrees on the unmark pass. If we've already seen them,
845 don't look down further. */
846 if (TREE_CODE_CLASS (code) == tcc_type
847 || TREE_CODE_CLASS (code) == tcc_declaration
848 || TREE_CODE_CLASS (code) == tcc_constant)
849 {
850 if (TREE_VISITED (t))
851 *walk_subtrees = 0;
852 else
853 TREE_VISITED (t) = 1;
854 }
855
856 /* If this node has been visited already, unshare it and don't look
857 any deeper. */
858 else if (TREE_VISITED (t))
859 {
860 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
861 *walk_subtrees = 0;
862 }
863
864 /* Otherwise, mark the tree as visited and keep looking. */
865 else
866 TREE_VISITED (t) = 1;
867
868 return NULL_TREE;
869 }
870
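/* Callback for walk_tree to clear the TREE_VISITED flag set by
copy_if_shared_r, stopping the walk at nodes that were never visited. */
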
871 static tree
872 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
873 void *data ATTRIBUTE_UNUSED)
874 {
875 if (TREE_VISITED (*tp))
876 TREE_VISITED (*tp) = 0;
877 else
878 *walk_subtrees = 0;
879
880 return NULL_TREE;
881 }
882
883 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
884 bodies of any nested functions if we are unsharing the entire body of
885 FNDECL. */
886
887 static void
888 unshare_body (tree *body_p, tree fndecl)
889 {
890 struct cgraph_node *cgn = cgraph_node (fndecl);
891
892 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
893 if (body_p == &DECL_SAVED_TREE (fndecl))
894 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
895 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
896 }
897
898 /* Likewise, but mark all trees as not visited. */
899
900 static void
901 unvisit_body (tree *body_p, tree fndecl)
902 {
903 struct cgraph_node *cgn = cgraph_node (fndecl);
904
905 walk_tree (body_p, unmark_visited_r, NULL, NULL);
906 if (body_p == &DECL_SAVED_TREE (fndecl))
907 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
908 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
909 }
910
911 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
912
913 static void
914 unshare_all_trees (tree t)
915 {
916 walk_tree (&t, copy_if_shared_r, NULL, NULL);
917 walk_tree (&t, unmark_visited_r, NULL, NULL);
918 }
919
920 /* Unconditionally make an unshared copy of EXPR. This is used when using
921 stored expressions which span multiple functions, such as BINFO_VTABLE,
922 as the normal unsharing process can't tell that they're shared. */
923
924 tree
925 unshare_expr (tree expr)
926 {
927 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
928 return expr;
929 }
930
931 /* A terser interface for building a representation of an exception
932 specification. */
933
934 tree
935 gimple_build_eh_filter (tree body, tree allowed, tree failure)
936 {
937 tree t;
938
939 /* FIXME should the allowed types go in TREE_TYPE? */
940 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
941 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
942
943 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
944 append_to_statement_list (body, &TREE_OPERAND (t, 0));
945
946 return t;
947 }
948
949
950 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
951 contain statements and have a value. Assign its value to a temporary
952 and give it void_type_node. Returns the temporary, or NULL_TREE if
953 WRAPPER was already void. */
954
955 tree
956 voidify_wrapper_expr (tree wrapper, tree temp)
957 {
958 tree type = TREE_TYPE (wrapper);
959 if (type && !VOID_TYPE_P (type))
960 {
961 tree *p;
962
963 /* Set p to point to the body of the wrapper. Loop until we find
964 something that isn't a wrapper. */
965 for (p = &wrapper; p && *p; )
966 {
967 switch (TREE_CODE (*p))
968 {
969 case BIND_EXPR:
970 TREE_SIDE_EFFECTS (*p) = 1;
971 TREE_TYPE (*p) = void_type_node;
972 /* For a BIND_EXPR, the body is operand 1. */
973 p = &BIND_EXPR_BODY (*p);
974 break;
975
976 case CLEANUP_POINT_EXPR:
977 case TRY_FINALLY_EXPR:
978 case TRY_CATCH_EXPR:
979 TREE_SIDE_EFFECTS (*p) = 1;
980 TREE_TYPE (*p) = void_type_node;
981 p = &TREE_OPERAND (*p, 0);
982 break;
983
984 case STATEMENT_LIST:
985 {
986 tree_stmt_iterator i = tsi_last (*p);
987 TREE_SIDE_EFFECTS (*p) = 1;
988 TREE_TYPE (*p) = void_type_node;
989 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
990 }
991 break;
992
993 case COMPOUND_EXPR:
994 /* Advance to the last statement. Set all container types to void. */
995 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
996 {
997 TREE_SIDE_EFFECTS (*p) = 1;
998 TREE_TYPE (*p) = void_type_node;
999 }
1000 break;
1001
1002 default:
1003 goto out;
1004 }
1005 }
1006
1007 out:
1008 if (p == NULL || IS_EMPTY_STMT (*p))
1009 temp = NULL_TREE;
1010 else if (temp)
1011 {
1012 /* The wrapper is on the RHS of an assignment that we're pushing
1013 down. */
1014 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1015 || TREE_CODE (temp) == MODIFY_EXPR);
1016 TREE_OPERAND (temp, 1) = *p;
1017 *p = temp;
1018 }
1019 else
1020 {
1021 temp = create_tmp_var (type, "retval");
1022 *p = build2 (INIT_EXPR, type, temp, *p);
1023 }
1024
1025 return temp;
1026 }
1027
1028 return NULL_TREE;
1029 }
1030
1031 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1032 a temporary through which they communicate. */
1033
1034 static void
1035 build_stack_save_restore (tree *save, tree *restore)
1036 {
1037 tree save_call, tmp_var;
1038
1039 save_call =
1040 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
1041 NULL_TREE);
1042 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1043
1044 *save = build2 (MODIFY_EXPR, ptr_type_node, tmp_var, save_call);
1045 *restore =
1046 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1047 tree_cons (NULL_TREE, tmp_var, NULL_TREE));
1048 }
1049
1050 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1051
1052 static enum gimplify_status
1053 gimplify_bind_expr (tree *expr_p, tree *pre_p)
1054 {
1055 tree bind_expr = *expr_p;
1056 bool old_save_stack = gimplify_ctxp->save_stack;
1057 tree t;
1058
1059 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1060
1061 /* Mark variables seen in this bind expr. */
1062 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1063 {
1064 if (TREE_CODE (t) == VAR_DECL)
1065 {
1066 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1067
1068 /* Mark variable as local. */
1069 if (ctx && !is_global_var (t)
1070 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1071 || splay_tree_lookup (ctx->variables,
1072 (splay_tree_key) t) == NULL))
1073 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1074
1075 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1076 }
1077
1078 /* Preliminarily mark non-addressed complex variables as eligible
1079 for promotion to gimple registers. We'll transform their uses
1080 as we find them. */
1081 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1082 && !TREE_THIS_VOLATILE (t)
1083 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1084 && !needs_to_live_in_memory (t))
1085 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
1086 }
1087
1088 gimple_push_bind_expr (bind_expr);
1089 gimplify_ctxp->save_stack = false;
1090
1091 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1092
1093 if (gimplify_ctxp->save_stack)
1094 {
1095 tree stack_save, stack_restore;
1096
1097 /* Save stack on entry and restore it on exit. Add a try_finally
1098 block to achieve this. Note that mudflap depends on the
1099 format of the emitted code: see mx_register_decls(). */
1100 build_stack_save_restore (&stack_save, &stack_restore);
1101
1102 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1103 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1104 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1105
1106 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1107 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1108 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1109 }
1110
1111 gimplify_ctxp->save_stack = old_save_stack;
1112 gimple_pop_bind_expr ();
1113
1114 if (temp)
1115 {
1116 *expr_p = temp;
1117 append_to_statement_list (bind_expr, pre_p);
1118 return GS_OK;
1119 }
1120 else
1121 return GS_ALL_DONE;
1122 }
1123
1124 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1125 GIMPLE value, it is assigned to a new temporary and the statement is
1126 re-written to return the temporary.
1127
1128 PRE_P points to the list where side effects that must happen before
1129 STMT should be stored. */
1130
1131 static enum gimplify_status
1132 gimplify_return_expr (tree stmt, tree *pre_p)
1133 {
1134 tree ret_expr = TREE_OPERAND (stmt, 0);
1135 tree result_decl, result;
1136
1137 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1138 || ret_expr == error_mark_node)
1139 return GS_ALL_DONE;
1140
1141 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1142 result_decl = NULL_TREE;
1143 else
1144 {
1145 result_decl = TREE_OPERAND (ret_expr, 0);
1146 if (TREE_CODE (result_decl) == INDIRECT_REF)
1147 /* See through a return by reference. */
1148 result_decl = TREE_OPERAND (result_decl, 0);
1149
1150 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1151 || TREE_CODE (ret_expr) == INIT_EXPR)
1152 && TREE_CODE (result_decl) == RESULT_DECL);
1153 }
1154
1155 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1156 Recall that aggregate_value_p is FALSE for any aggregate type that is
1157 returned in registers. If we're returning values in registers, then
1158 we don't want to extend the lifetime of the RESULT_DECL, particularly
1159 across another call. In addition, for those aggregates for which
1160 hard_function_value generates a PARALLEL, we'll die during normal
1161 expansion of structure assignments; there's special code in expand_return
1162 to handle this case that does not exist in expand_expr. */
1163 if (!result_decl
1164 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1165 result = result_decl;
1166 else if (gimplify_ctxp->return_temp)
1167 result = gimplify_ctxp->return_temp;
1168 else
1169 {
1170 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1171
1172 /* ??? With complex control flow (usually involving abnormal edges),
1173 we can wind up warning about an uninitialized value for this. Due
1174 to how this variable is constructed and initialized, this is never
1175 true. Give up and never warn. */
1176 TREE_NO_WARNING (result) = 1;
1177
1178 gimplify_ctxp->return_temp = result;
1179 }
1180
1181 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1182 Then gimplify the whole thing. */
1183 if (result != result_decl)
1184 TREE_OPERAND (ret_expr, 0) = result;
1185
1186 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1187
1188 /* If we didn't use a temporary, then the result is just the result_decl.
1189 Otherwise we need a simple copy. This should already be gimple. */
1190 if (result == result_decl)
1191 ret_expr = result;
1192 else
1193 ret_expr = build2 (MODIFY_EXPR, TREE_TYPE (result), result_decl, result);
1194 TREE_OPERAND (stmt, 0) = ret_expr;
1195
1196 return GS_ALL_DONE;
1197 }
1198
1199 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1200 and initialization explicit. */
1201
1202 static enum gimplify_status
1203 gimplify_decl_expr (tree *stmt_p)
1204 {
1205 tree stmt = *stmt_p;
1206 tree decl = DECL_EXPR_DECL (stmt);
1207
1208 *stmt_p = NULL_TREE;
1209
1210 if (TREE_TYPE (decl) == error_mark_node)
1211 return GS_ERROR;
1212
1213 if ((TREE_CODE (decl) == TYPE_DECL
1214 || TREE_CODE (decl) == VAR_DECL)
1215 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1216 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1217
1218 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1219 {
1220 tree init = DECL_INITIAL (decl);
1221
1222 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1223 {
1224 /* This is a variable-sized decl. Simplify its size and mark it
1225 for deferred expansion. Note that mudflap depends on the format
1226 of the emitted code: see mx_register_decls(). */
1227 tree t, args, addr, ptr_type;
1228
1229 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1230 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1231
1232 /* All occurrences of this decl in final gimplified code will be
1233 replaced by indirection. Setting DECL_VALUE_EXPR does two
1234 things: First, it lets the rest of the gimplifier know what
1235 replacement to use. Second, it lets the debug info know
1236 where to find the value. */
1237 ptr_type = build_pointer_type (TREE_TYPE (decl));
1238 addr = create_tmp_var (ptr_type, get_name (decl));
1239 DECL_IGNORED_P (addr) = 0;
1240 t = build_fold_indirect_ref (addr);
1241 SET_DECL_VALUE_EXPR (decl, t);
1242 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1243
1244 args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
1245 t = built_in_decls[BUILT_IN_ALLOCA];
1246 t = build_function_call_expr (t, args);
1247 t = fold_convert (ptr_type, t);
1248 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
1249
1250 gimplify_and_add (t, stmt_p);
1251
1252 /* Indicate that we need to restore the stack level when the
1253 enclosing BIND_EXPR is exited. */
1254 gimplify_ctxp->save_stack = true;
1255 }
1256
1257 if (init && init != error_mark_node)
1258 {
1259 if (!TREE_STATIC (decl))
1260 {
1261 DECL_INITIAL (decl) = NULL_TREE;
1262 init = build2 (INIT_EXPR, void_type_node, decl, init);
1263 gimplify_and_add (init, stmt_p);
1264 }
1265 else
1266 /* We must still examine initializers for static variables
1267 as they may contain a label address. */
1268 walk_tree (&init, force_labels_r, NULL, NULL);
1269 }
1270
1271 /* Some front ends do not explicitly declare all anonymous
1272 artificial variables. We compensate here by declaring the
1273 variables, though it would be better if the front ends would
1274 explicitly declare them. */
1275 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1276 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1277 gimple_add_tmp_var (decl);
1278 }
1279
1280 return GS_ALL_DONE;
1281 }
1282
1283 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1284 and replacing the LOOP_EXPR with goto, but if the loop contains an
1285 EXIT_EXPR, we need to append a label for it to jump to. */
1286
1287 static enum gimplify_status
1288 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1289 {
1290 tree saved_label = gimplify_ctxp->exit_label;
1291 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1292 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1293
1294 append_to_statement_list (start_label, pre_p);
1295
1296 gimplify_ctxp->exit_label = NULL_TREE;
1297
1298 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1299
1300 if (gimplify_ctxp->exit_label)
1301 {
1302 append_to_statement_list (jump_stmt, pre_p);
1303 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1304 }
1305 else
1306 *expr_p = jump_stmt;
1307
1308 gimplify_ctxp->exit_label = saved_label;
1309
1310 return GS_ALL_DONE;
1311 }
1312
1313 /* Compare two case labels. Because the front end should already have
1314 made sure that case ranges do not overlap, it is enough to only compare
1315 the CASE_LOW values of each case label. */
1316
1317 static int
1318 compare_case_labels (const void *p1, const void *p2)
1319 {
1320 tree case1 = *(tree *)p1;
1321 tree case2 = *(tree *)p2;
1322
1323 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1324 }
1325
1326 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1327
1328 void
1329 sort_case_labels (tree label_vec)
1330 {
1331 size_t len = TREE_VEC_LENGTH (label_vec);
1332 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1333
1334 if (CASE_LOW (default_case))
1335 {
1336 size_t i;
1337
1338 /* The last label in the vector should be the default case
1339 but it is not. */
1340 for (i = 0; i < len; ++i)
1341 {
1342 tree t = TREE_VEC_ELT (label_vec, i);
1343 if (!CASE_LOW (t))
1344 {
1345 default_case = t;
1346 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1347 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1348 break;
1349 }
1350 }
1351 }
1352
1353 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1354 compare_case_labels);
1355 }
1356
1357 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1358 branch to. */
1359
1360 static enum gimplify_status
1361 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1362 {
1363 tree switch_expr = *expr_p;
1364 enum gimplify_status ret;
1365
1366 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1367 is_gimple_val, fb_rvalue);
1368
1369 if (SWITCH_BODY (switch_expr))
1370 {
1371 VEC(tree,heap) *labels, *saved_labels;
1372 tree label_vec, default_case = NULL_TREE;
1373 size_t i, len;
1374
1375 /* If someone can be bothered to fill in the labels, they can
1376 be bothered to null out the body too. */
1377 gcc_assert (!SWITCH_LABELS (switch_expr));
1378
1379 saved_labels = gimplify_ctxp->case_labels;
1380 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1381
1382 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1383
1384 labels = gimplify_ctxp->case_labels;
1385 gimplify_ctxp->case_labels = saved_labels;
1386
1387 i = 0;
1388 while (i < VEC_length (tree, labels))
1389 {
1390 tree elt = VEC_index (tree, labels, i);
1391 tree low = CASE_LOW (elt);
1392 bool remove_element = FALSE;
1393
1394 if (low)
1395 {
1396 /* Discard empty ranges. */
1397 tree high = CASE_HIGH (elt);
1398 if (high && INT_CST_LT (high, low))
1399 remove_element = TRUE;
1400 }
1401 else
1402 {
1403 /* The default case must be the last label in the list. */
1404 gcc_assert (!default_case);
1405 default_case = elt;
1406 remove_element = TRUE;
1407 }
1408
1409 if (remove_element)
1410 VEC_ordered_remove (tree, labels, i);
1411 else
1412 i++;
1413 }
1414 len = i;
1415
1416 label_vec = make_tree_vec (len + 1);
1417 SWITCH_LABELS (*expr_p) = label_vec;
1418 append_to_statement_list (switch_expr, pre_p);
1419
1420 if (! default_case)
1421 {
1422 /* If the switch has no default label, add one, so that we jump
1423 around the switch body. */
1424 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1425 NULL_TREE, create_artificial_label ());
1426 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1427 *expr_p = build1 (LABEL_EXPR, void_type_node,
1428 CASE_LABEL (default_case));
1429 }
1430 else
1431 *expr_p = SWITCH_BODY (switch_expr);
1432
1433 for (i = 0; i < len; ++i)
1434 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1435 TREE_VEC_ELT (label_vec, len) = default_case;
1436
1437 VEC_free (tree, heap, labels);
1438
1439 sort_case_labels (label_vec);
1440
1441 SWITCH_BODY (switch_expr) = NULL;
1442 }
1443 else
1444 gcc_assert (SWITCH_LABELS (switch_expr));
1445
1446 return ret;
1447 }
1448
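/* Gimplify a CASE_LABEL_EXPR by recording the label with the innermost
gimplification context that is collecting case labels and replacing the
expression with a plain LABEL_EXPR. */
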
1449 static enum gimplify_status
1450 gimplify_case_label_expr (tree *expr_p)
1451 {
1452 tree expr = *expr_p;
1453 struct gimplify_ctx *ctxp;
1454
1455 /* Invalid OpenMP programs can play Duff's Device type games with
1456 #pragma omp parallel. At least in the C front end, we don't
1457 detect such invalid branches until after gimplification. */
1458 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1459 if (ctxp->case_labels)
1460 break;
1461
1462 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1463 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1464 return GS_ALL_DONE;
1465 }
1466
1467 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1468 if necessary. */
1469
1470 tree
1471 build_and_jump (tree *label_p)
1472 {
1473 if (label_p == NULL)
1474 /* If there's nowhere to jump, just fall through. */
1475 return NULL_TREE;
1476
1477 if (*label_p == NULL_TREE)
1478 {
1479 tree label = create_artificial_label ();
1480 *label_p = label;
1481 }
1482
1483 return build1 (GOTO_EXPR, void_type_node, *label_p);
1484 }
1485
1486 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1487 This also involves building a label to jump to and communicating it to
1488 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1489
1490 static enum gimplify_status
1491 gimplify_exit_expr (tree *expr_p)
1492 {
1493 tree cond = TREE_OPERAND (*expr_p, 0);
1494 tree expr;
1495
1496 expr = build_and_jump (&gimplify_ctxp->exit_label);
1497 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1498 *expr_p = expr;
1499
1500 return GS_OK;
1501 }
1502
1503 /* A helper function to be called via walk_tree. Mark all labels under *TP
1504 as being forced. To be called for DECL_INITIAL of static variables. */
1505
1506 tree
1507 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1508 {
1509 if (TYPE_P (*tp))
1510 *walk_subtrees = 0;
1511 if (TREE_CODE (*tp) == LABEL_DECL)
1512 FORCED_LABEL (*tp) = 1;
1513
1514 return NULL_TREE;
1515 }
1516
1517 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1518 different from its canonical type, wrap the whole thing inside a
1519 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1520 type.
1521
1522 The canonical type of a COMPONENT_REF is the type of the field being
1523 referenced--unless the field is a bit-field which can be read directly
1524 in a smaller mode, in which case the canonical type is the
1525 sign-appropriate type corresponding to that mode. */
1526
1527 static void
1528 canonicalize_component_ref (tree *expr_p)
1529 {
1530 tree expr = *expr_p;
1531 tree type;
1532
1533 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1534
1535 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1536 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1537 else
1538 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1539
1540 if (TREE_TYPE (expr) != type)
1541 {
1542 tree old_type = TREE_TYPE (expr);
1543
1544 /* Set the type of the COMPONENT_REF to the underlying type. */
1545 TREE_TYPE (expr) = type;
1546
1547 /* And wrap the whole thing inside a NOP_EXPR. */
1548 expr = build1 (NOP_EXPR, old_type, expr);
1549
1550 *expr_p = expr;
1551 }
1552 }
1553
1554 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1555 to foo, embed that change in the ADDR_EXPR by converting
1556 T array[U];
1557 (T *)&array
1558 ==>
1559 &array[L]
1560 where L is the lower bound. For simplicity, only do this for constant
1561 lower bound. */
1562
1563 static void
1564 canonicalize_addr_expr (tree *expr_p)
1565 {
1566 tree expr = *expr_p;
1567 tree ctype = TREE_TYPE (expr);
1568 tree addr_expr = TREE_OPERAND (expr, 0);
1569 tree atype = TREE_TYPE (addr_expr);
1570 tree dctype, datype, ddatype, otype, obj_expr;
1571
1572 /* Both cast and addr_expr types should be pointers. */
1573 if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
1574 return;
1575
1576 /* The addr_expr type should be a pointer to an array. */
1577 datype = TREE_TYPE (atype);
1578 if (TREE_CODE (datype) != ARRAY_TYPE)
1579 return;
1580
1581 /* Both cast and addr_expr types should address the same object type. */
1582 dctype = TREE_TYPE (ctype);
1583 ddatype = TREE_TYPE (datype);
1584 if (!lang_hooks.types_compatible_p (ddatype, dctype))
1585 return;
1586
1587 /* The addr_expr and the object type should match. */
1588 obj_expr = TREE_OPERAND (addr_expr, 0);
1589 otype = TREE_TYPE (obj_expr);
1590 if (!lang_hooks.types_compatible_p (otype, datype))
1591 return;
1592
1593 /* The lower bound and element sizes must be constant. */
1594 if (!TYPE_SIZE_UNIT (dctype)
1595 || TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
1596 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1597 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1598 return;
1599
1600 /* All checks succeeded. Build a new node to merge the cast. */
1601 *expr_p = build4 (ARRAY_REF, dctype, obj_expr,
1602 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1603 NULL_TREE, NULL_TREE);
1604 *expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
1605 }
1606
1607 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1608 underneath as appropriate. */
1609
1610 static enum gimplify_status
1611 gimplify_conversion (tree *expr_p)
1612 {
1613 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1614 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1615
1616 /* Then strip away all but the outermost conversion. */
1617 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1618
1619 /* And remove the outermost conversion if it's useless. */
1620 if (tree_ssa_useless_type_conversion (*expr_p))
1621 *expr_p = TREE_OPERAND (*expr_p, 0);
1622
1623 /* If we still have a conversion at the toplevel,
1624 then canonicalize some constructs. */
1625 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1626 {
1627 tree sub = TREE_OPERAND (*expr_p, 0);
1628
1629 /* If a NOP conversion is changing the type of a COMPONENT_REF
1630 expression, then canonicalize its type now in order to expose more
1631 redundant conversions. */
1632 if (TREE_CODE (sub) == COMPONENT_REF)
1633 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1634
1635 /* If a NOP conversion is changing a pointer to array of foo
1636 to a pointer to foo, embed that change in the ADDR_EXPR. */
1637 else if (TREE_CODE (sub) == ADDR_EXPR)
1638 canonicalize_addr_expr (expr_p);
1639 }
1640
1641 return GS_OK;
1642 }
1643
1644 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1645 DECL_VALUE_EXPR, and it's worth re-examining things. */
1646
1647 static enum gimplify_status
1648 gimplify_var_or_parm_decl (tree *expr_p)
1649 {
1650 tree decl = *expr_p;
1651
1652 /* ??? If this is a local variable, and it has not been seen in any
1653 outer BIND_EXPR, then it's probably the result of a duplicate
1654 declaration, for which we've already issued an error. It would
1655 be really nice if the front end wouldn't leak these at all.
1656 Currently the only known culprit is C++ destructors, as seen
1657 in g++.old-deja/g++.jason/binding.C. */
1658 if (TREE_CODE (decl) == VAR_DECL
1659 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1660 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1661 && decl_function_context (decl) == current_function_decl)
1662 {
1663 gcc_assert (errorcount || sorrycount);
1664 return GS_ERROR;
1665 }
1666
1667 /* When within an OpenMP context, notice uses of variables. */
1668 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1669 return GS_ALL_DONE;
1670
1671 /* If the decl is an alias for another expression, substitute it now. */
1672 if (DECL_HAS_VALUE_EXPR_P (decl))
1673 {
1674 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1675 return GS_OK;
1676 }
1677
1678 return GS_ALL_DONE;
1679 }
1680
1681
1682 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1683 node pointed to by EXPR_P.
1684
1685 compound_lval
1686 : min_lval '[' val ']'
1687 | min_lval '.' ID
1688 | compound_lval '[' val ']'
1689 | compound_lval '.' ID
1690
1691 This is not part of the original SIMPLE definition, which separates
1692 array and member references, but it seems reasonable to handle them
1693 together. Also, this way we don't run into problems with union
1694 aliasing; gcc requires that for accesses through a union to alias, the
1695 union reference must be explicit, which was not always the case when we
1696 were splitting up array and member refs.
1697
1698 PRE_P points to the list where side effects that must happen before
1699 *EXPR_P should be stored.
1700
1701 POST_P points to the list where side effects that must happen after
1702 *EXPR_P should be stored. */
1703
1704 static enum gimplify_status
1705 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1706 tree *post_p, fallback_t fallback)
1707 {
1708 tree *p;
1709 VEC(tree,heap) *stack;
1710 enum gimplify_status ret = GS_OK, tret;
1711 int i;
1712
1713 /* Create a stack of the subexpressions so later we can walk them in
1714 order from inner to outer. */
1715 stack = VEC_alloc (tree, heap, 10);
1716
1717 /* We can handle anything that get_inner_reference can deal with. */
1718 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1719 {
1720 restart:
1721 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1722 if (TREE_CODE (*p) == INDIRECT_REF)
1723 *p = fold_indirect_ref (*p);
1724
1725 if (handled_component_p (*p))
1726 ;
1727 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1728 additional COMPONENT_REFs. */
1729 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1730 && gimplify_var_or_parm_decl (p) == GS_OK)
1731 goto restart;
1732 else
1733 break;
1734
1735 VEC_safe_push (tree, heap, stack, *p);
1736 }
1737
1738 gcc_assert (VEC_length (tree, stack));
1739
1740 /* Now STACK is a stack of pointers to all the refs we've walked through
1741 and P points to the innermost expression.
1742
1743 Java requires that we elaborate nodes in source order. That
1744 means we must gimplify the inner expression followed by each of
1745 the indices, in order. But we can't gimplify the inner
1746 expression until we deal with any variable bounds, sizes, or
1747 positions in order to deal with PLACEHOLDER_EXPRs.
1748
1749 So we do this in three steps. First we deal with the annotations
1750 for any variables in the components, then we gimplify the base,
1751 then we gimplify any indices, from left to right. */
1752 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1753 {
1754 tree t = VEC_index (tree, stack, i);
1755
1756 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1757 {
1758 /* Gimplify the low bound and element type size and put them into
1759 the ARRAY_REF. If these values are set, they have already been
1760 gimplified. */
1761 if (!TREE_OPERAND (t, 2))
1762 {
1763 tree low = unshare_expr (array_ref_low_bound (t));
1764 if (!is_gimple_min_invariant (low))
1765 {
1766 TREE_OPERAND (t, 2) = low;
1767 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1768 is_gimple_formal_tmp_reg, fb_rvalue);
1769 ret = MIN (ret, tret);
1770 }
1771 }
1772
1773 if (!TREE_OPERAND (t, 3))
1774 {
1775 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1776 tree elmt_size = unshare_expr (array_ref_element_size (t));
1777 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1778
1779 /* Divide the element size by the alignment of the element
1780 type (above). */
1781 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1782
1783 if (!is_gimple_min_invariant (elmt_size))
1784 {
1785 TREE_OPERAND (t, 3) = elmt_size;
1786 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1787 is_gimple_formal_tmp_reg, fb_rvalue);
1788 ret = MIN (ret, tret);
1789 }
1790 }
1791 }
1792 else if (TREE_CODE (t) == COMPONENT_REF)
1793 {
1794 /* Set the field offset into T and gimplify it. */
1795 if (!TREE_OPERAND (t, 2))
1796 {
1797 tree offset = unshare_expr (component_ref_field_offset (t));
1798 tree field = TREE_OPERAND (t, 1);
1799 tree factor
1800 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1801
1802 /* Divide the offset by its alignment. */
1803 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1804
1805 if (!is_gimple_min_invariant (offset))
1806 {
1807 TREE_OPERAND (t, 2) = offset;
1808 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1809 is_gimple_formal_tmp_reg, fb_rvalue);
1810 ret = MIN (ret, tret);
1811 }
1812 }
1813 }
1814 }
1815
1816 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1817 so as to match the min_lval predicate. Failure to do so may result
1818 in the creation of large aggregate temporaries. */
1819 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1820 fallback | fb_lvalue);
1821 ret = MIN (ret, tret);
1822
1823 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1824 loop we also remove any useless conversions. */
1825 for (; VEC_length (tree, stack) > 0; )
1826 {
1827 tree t = VEC_pop (tree, stack);
1828
1829 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1830 {
1831 /* Gimplify the dimension.
1832 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1833 Gimplify non-constant array indices into a temporary
1834 variable.
1835 FIXME - The real fix is to gimplify post-modify
1836 expressions into a minimal gimple lvalue. However, that
1837 exposes bugs in alias analysis. The alias analyzer does
1838 not handle &PTR->FIELD very well. Will fix after the
1839 branch is merged into mainline (dnovillo 2004-05-03). */
1840 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1841 {
1842 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1843 is_gimple_formal_tmp_reg, fb_rvalue);
1844 ret = MIN (ret, tret);
1845 }
1846 }
1847 else if (TREE_CODE (t) == BIT_FIELD_REF)
1848 {
1849 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1850 is_gimple_val, fb_rvalue);
1851 ret = MIN (ret, tret);
1852 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1853 is_gimple_val, fb_rvalue);
1854 ret = MIN (ret, tret);
1855 }
1856
1857 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1858
1859 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1860 set which would have caused all the outer expressions in EXPR_P
1861 leading to P to also have had TREE_SIDE_EFFECTS set. */
1862 recalculate_side_effects (t);
1863 }
1864
1865 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1866 ret = MIN (ret, tret);
1867
1868 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1869 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1870 {
1871 canonicalize_component_ref (expr_p);
1872 ret = MIN (ret, GS_OK);
1873 }
1874
1875 VEC_free (tree, heap, stack);
1876
1877 return ret;
1878 }
1879
1880 /* Gimplify the self modifying expression pointed to by EXPR_P
1881 (++, --, +=, -=).
1882
1883 PRE_P points to the list where side effects that must happen before
1884 *EXPR_P should be stored.
1885
1886 POST_P points to the list where side effects that must happen after
1887 *EXPR_P should be stored.
1888
1889 WANT_VALUE is nonzero iff we want to use the value of this expression
1890 in another expression. */
1891
1892 static enum gimplify_status
1893 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1894 bool want_value)
1895 {
1896 enum tree_code code;
1897 tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
1898 bool postfix;
1899 enum tree_code arith_code;
1900 enum gimplify_status ret;
1901
1902 code = TREE_CODE (*expr_p);
1903
1904 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1905 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1906
1907 /* Prefix or postfix? */
1908 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1909 /* Faster to treat as prefix if result is not used. */
1910 postfix = want_value;
1911 else
1912 postfix = false;
1913
1914 /* For postfix, make sure the inner expression's post side effects
1915 are executed after side effects from this expression. */
1916 if (postfix)
1917 post_p = &post;
1918
1919 /* Add or subtract? */
1920 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1921 arith_code = PLUS_EXPR;
1922 else
1923 arith_code = MINUS_EXPR;
1924
1925 /* Gimplify the LHS into a GIMPLE lvalue. */
1926 lvalue = TREE_OPERAND (*expr_p, 0);
1927 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1928 if (ret == GS_ERROR)
1929 return ret;
1930
1931 /* Extract the operands to the arithmetic operation. */
1932 lhs = lvalue;
1933 rhs = TREE_OPERAND (*expr_p, 1);
1934
1935 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
1936 that as the result value and in the postqueue operation. */
1937 if (postfix)
1938 {
1939 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1940 if (ret == GS_ERROR)
1941 return ret;
1942 }
1943
1944 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1945 t1 = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
1946
1947 if (postfix)
1948 {
1949 gimplify_and_add (t1, orig_post_p);
1950 append_to_statement_list (post, orig_post_p);
1951 *expr_p = lhs;
1952 return GS_ALL_DONE;
1953 }
1954 else
1955 {
1956 *expr_p = t1;
1957 return GS_OK;
1958 }
1959 }
1960
1961 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
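
/* For instance (illustrative), for an object of C99 VLA type
   'char a[n]' the expression is rewritten roughly as
   WITH_SIZE_EXPR <a, n>, so later code that copies the object still
   knows how many bytes are involved.  */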
1962
1963 static void
1964 maybe_with_size_expr (tree *expr_p)
1965 {
1966 tree expr = *expr_p;
1967 tree type = TREE_TYPE (expr);
1968 tree size;
1969
1970 /* If we've already wrapped this or the type is error_mark_node, we can't do
1971 anything. */
1972 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1973 || type == error_mark_node)
1974 return;
1975
1976 /* If the size isn't known or is a constant, we have nothing to do. */
1977 size = TYPE_SIZE_UNIT (type);
1978 if (!size || TREE_CODE (size) == INTEGER_CST)
1979 return;
1980
1981 /* Otherwise, make a WITH_SIZE_EXPR. */
1982 size = unshare_expr (size);
1983 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1984 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
1985 }
1986
1987 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
1988
1989 static enum gimplify_status
1990 gimplify_arg (tree *expr_p, tree *pre_p)
1991 {
1992 bool (*test) (tree);
1993 fallback_t fb;
1994
1995 /* In general, we allow lvalues for function arguments to avoid
1996 extra overhead of copying large aggregates out of even larger
1997 aggregates into temporaries only to copy the temporaries to
1998 the argument list. Make optimizers happy by pulling out to
1999 temporaries those types that fit in registers. */
2000 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
2001 test = is_gimple_val, fb = fb_rvalue;
2002 else
2003 test = is_gimple_lvalue, fb = fb_either;
2004
2005 /* If this is a variable sized type, we must remember the size. */
2006 maybe_with_size_expr (expr_p);
2007
2008 /* There is a sequence point before a function call. Side effects in
2009 the argument list must occur before the actual call. So, when
2010 gimplifying arguments, force gimplify_expr to use an internal
2011 post queue which is then appended to the end of PRE_P. */
2012 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
2013 }
2014
2015 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
2016 list where side effects that must happen before *EXPR_P should be stored.
2017 WANT_VALUE is true if the result of the call is desired. */
2018
2019 static enum gimplify_status
2020 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
2021 {
2022 tree decl;
2023 tree arglist;
2024 enum gimplify_status ret;
2025
2026 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2027
2028 /* For reliable diagnostics during inlining, it is necessary that
2029 every call_expr be annotated with file and line. */
2030 if (! EXPR_HAS_LOCATION (*expr_p))
2031 SET_EXPR_LOCATION (*expr_p, input_location);
2032
2033 /* This may be a call to a builtin function.
2034
2035 Builtin function calls may be transformed into different
2036 (and more efficient) builtin function calls under certain
2037 circumstances. Unfortunately, gimplification can muck things
2038 up enough that the builtin expanders are not aware that certain
2039 transformations are still valid.
2040
2041 So we attempt transformation/gimplification of the call before
2042 we gimplify the CALL_EXPR. At this time we do not manage to
2043 transform all calls in the same manner as the expanders do, but
2044 we do transform most of them. */
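  /* One typical folding done here (illustrative) is turning
     printf ("%s\n", str) into puts (str) when that is valid for the
     target; the gimplified call then uses the simpler builtin.  */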
2045 decl = get_callee_fndecl (*expr_p);
2046 if (decl && DECL_BUILT_IN (decl))
2047 {
2048 tree arglist = TREE_OPERAND (*expr_p, 1);
2049 tree new = fold_builtin (decl, arglist, !want_value);
2050
2051 if (new && new != *expr_p)
2052 {
2053 /* There was a transformation of this call which computes the
2054 same value, but in a more efficient way. Return and try
2055 again. */
2056 *expr_p = new;
2057 return GS_OK;
2058 }
2059
2060 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2061 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
2062 {
2063 if (!arglist || !TREE_CHAIN (arglist))
2064 {
2065 error ("too few arguments to function %<va_start%>");
2066 *expr_p = build_empty_stmt ();
2067 return GS_OK;
2068 }
2069
2070 if (fold_builtin_next_arg (TREE_CHAIN (arglist)))
2071 {
2072 *expr_p = build_empty_stmt ();
2073 return GS_OK;
2074 }
2075 /* Avoid gimplifying the second argument to va_start, which needs
2076 to be the plain PARM_DECL. */
2077 return gimplify_arg (&TREE_VALUE (TREE_OPERAND (*expr_p, 1)), pre_p);
2078 }
2079 }
2080
2081 /* There is a sequence point before the call, so any side effects in
2082 the calling expression must occur before the actual call. Force
2083 gimplify_expr to use an internal post queue. */
2084 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
2085 is_gimple_call_addr, fb_rvalue);
2086
2087 if (PUSH_ARGS_REVERSED)
2088 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2089 for (arglist = TREE_OPERAND (*expr_p, 1); arglist;
2090 arglist = TREE_CHAIN (arglist))
2091 {
2092 enum gimplify_status t;
2093
2094 t = gimplify_arg (&TREE_VALUE (arglist), pre_p);
2095
2096 if (t == GS_ERROR)
2097 ret = GS_ERROR;
2098 }
2099 if (PUSH_ARGS_REVERSED)
2100 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2101
2102 /* Try this again in case gimplification exposed something. */
2103 if (ret != GS_ERROR)
2104 {
2105 decl = get_callee_fndecl (*expr_p);
2106 if (decl && DECL_BUILT_IN (decl))
2107 {
2108 tree arglist = TREE_OPERAND (*expr_p, 1);
2109 tree new = fold_builtin (decl, arglist, !want_value);
2110
2111 if (new && new != *expr_p)
2112 {
2113 /* There was a transformation of this call which computes the
2114 same value, but in a more efficient way. Return and try
2115 again. */
2116 *expr_p = new;
2117 return GS_OK;
2118 }
2119 }
2120 }
2121
2122 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2123 CALL_EXPR itself. This allows us to eliminate redundant or useless
2124 calls to "const" functions. */
2125 if (TREE_CODE (*expr_p) == CALL_EXPR
2126 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2127 TREE_SIDE_EFFECTS (*expr_p) = 0;
2128
2129 return ret;
2130 }
2131
2132 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2133 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2134
2135 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2136 condition is true or false, respectively. If null, we should generate
2137 our own to skip over the evaluation of this specific expression.
2138
2139 This function is the tree equivalent of do_jump.
2140
2141 shortcut_cond_r should only be called by shortcut_cond_expr. */
2142
2143 static tree
2144 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2145 {
2146 tree local_label = NULL_TREE;
2147 tree t, expr = NULL;
2148
2149 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2150 retain the shortcut semantics. Just insert the gotos here;
2151 shortcut_cond_expr will append the real blocks later. */
2152 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2153 {
2154 /* Turn if (a && b) into
2155
2156 if (a); else goto no;
2157 if (b) goto yes; else goto no;
2158 (no:) */
2159
2160 if (false_label_p == NULL)
2161 false_label_p = &local_label;
2162
2163 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2164 append_to_statement_list (t, &expr);
2165
2166 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2167 false_label_p);
2168 append_to_statement_list (t, &expr);
2169 }
2170 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2171 {
2172 /* Turn if (a || b) into
2173
2174 if (a) goto yes;
2175 if (b) goto yes; else goto no;
2176 (yes:) */
2177
2178 if (true_label_p == NULL)
2179 true_label_p = &local_label;
2180
2181 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2182 append_to_statement_list (t, &expr);
2183
2184 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2185 false_label_p);
2186 append_to_statement_list (t, &expr);
2187 }
2188 else if (TREE_CODE (pred) == COND_EXPR)
2189 {
2190 /* As long as we're messing with gotos, turn if (a ? b : c) into
2191 if (a)
2192 if (b) goto yes; else goto no;
2193 else
2194 if (c) goto yes; else goto no; */
2195 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2196 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2197 false_label_p),
2198 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2199 false_label_p));
2200 }
2201 else
2202 {
2203 expr = build3 (COND_EXPR, void_type_node, pred,
2204 build_and_jump (true_label_p),
2205 build_and_jump (false_label_p));
2206 }
2207
2208 if (local_label)
2209 {
2210 t = build1 (LABEL_EXPR, void_type_node, local_label);
2211 append_to_statement_list (t, &expr);
2212 }
2213
2214 return expr;
2215 }
2216
2217 static tree
2218 shortcut_cond_expr (tree expr)
2219 {
2220 tree pred = TREE_OPERAND (expr, 0);
2221 tree then_ = TREE_OPERAND (expr, 1);
2222 tree else_ = TREE_OPERAND (expr, 2);
2223 tree true_label, false_label, end_label, t;
2224 tree *true_label_p;
2225 tree *false_label_p;
2226 bool emit_end, emit_false, jump_over_else;
2227 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2228 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2229
2230 /* First do simple transformations. */
2231 if (!else_se)
2232 {
2233 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2234 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2235 {
2236 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2237 then_ = shortcut_cond_expr (expr);
2238 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2239 pred = TREE_OPERAND (pred, 0);
2240 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2241 }
2242 }
2243 if (!then_se)
2244 {
2245 /* If there is no 'then', turn
2246 if (a || b); else d
2247 into
2248 if (a); else if (b); else d. */
2249 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2250 {
2251 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2252 else_ = shortcut_cond_expr (expr);
2253 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2254 pred = TREE_OPERAND (pred, 0);
2255 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2256 }
2257 }
2258
2259 /* If we're done, great. */
2260 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2261 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2262 return expr;
2263
2264 /* Otherwise we need to mess with gotos. Change
2265 if (a) c; else d;
2266 to
2267 if (a); else goto no;
2268 c; goto end;
2269 no: d; end:
2270 and recursively gimplify the condition. */
2271
2272 true_label = false_label = end_label = NULL_TREE;
2273
2274 /* If our arms just jump somewhere, hijack those labels so we don't
2275 generate jumps to jumps. */
2276
2277 if (then_
2278 && TREE_CODE (then_) == GOTO_EXPR
2279 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2280 {
2281 true_label = GOTO_DESTINATION (then_);
2282 then_ = NULL;
2283 then_se = false;
2284 }
2285
2286 if (else_
2287 && TREE_CODE (else_) == GOTO_EXPR
2288 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2289 {
2290 false_label = GOTO_DESTINATION (else_);
2291 else_ = NULL;
2292 else_se = false;
2293 }
2294
2295 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2296 if (true_label)
2297 true_label_p = &true_label;
2298 else
2299 true_label_p = NULL;
2300
2301 /* The 'else' branch also needs a label if it contains interesting code. */
2302 if (false_label || else_se)
2303 false_label_p = &false_label;
2304 else
2305 false_label_p = NULL;
2306
2307 /* If there was nothing else in our arms, just forward the label(s). */
2308 if (!then_se && !else_se)
2309 return shortcut_cond_r (pred, true_label_p, false_label_p);
2310
2311 /* If our last subexpression already has a terminal label, reuse it. */
2312 if (else_se)
2313 expr = expr_last (else_);
2314 else if (then_se)
2315 expr = expr_last (then_);
2316 else
2317 expr = NULL;
2318 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2319 end_label = LABEL_EXPR_LABEL (expr);
2320
2321 /* If we don't care about jumping to the 'else' branch, jump to the end
2322 if the condition is false. */
2323 if (!false_label_p)
2324 false_label_p = &end_label;
2325
2326 /* We only want to emit these labels if we aren't hijacking them. */
2327 emit_end = (end_label == NULL_TREE);
2328 emit_false = (false_label == NULL_TREE);
2329
2330 /* We only emit the jump over the else clause if we have to--if the
2331 then clause may fall through. Otherwise we can wind up with a
2332 useless jump and a useless label at the end of gimplified code,
2333 which will cause us to think that this conditional as a whole
2334 falls through even if it doesn't. If we then inline a function
2335 which ends with such a condition, that can cause us to issue an
2336 inappropriate warning about control reaching the end of a
2337 non-void function. */
2338 jump_over_else = block_may_fallthru (then_);
2339
2340 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2341
2342 expr = NULL;
2343 append_to_statement_list (pred, &expr);
2344
2345 append_to_statement_list (then_, &expr);
2346 if (else_se)
2347 {
2348 if (jump_over_else)
2349 {
2350 t = build_and_jump (&end_label);
2351 append_to_statement_list (t, &expr);
2352 }
2353 if (emit_false)
2354 {
2355 t = build1 (LABEL_EXPR, void_type_node, false_label);
2356 append_to_statement_list (t, &expr);
2357 }
2358 append_to_statement_list (else_, &expr);
2359 }
2360 if (emit_end && end_label)
2361 {
2362 t = build1 (LABEL_EXPR, void_type_node, end_label);
2363 append_to_statement_list (t, &expr);
2364 }
2365
2366 return expr;
2367 }
2368
2369 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2370
2371 tree
2372 gimple_boolify (tree expr)
2373 {
2374 tree type = TREE_TYPE (expr);
2375
2376 if (TREE_CODE (type) == BOOLEAN_TYPE)
2377 return expr;
2378
2379 switch (TREE_CODE (expr))
2380 {
2381 case TRUTH_AND_EXPR:
2382 case TRUTH_OR_EXPR:
2383 case TRUTH_XOR_EXPR:
2384 case TRUTH_ANDIF_EXPR:
2385 case TRUTH_ORIF_EXPR:
2386 /* Also boolify the arguments of truth exprs. */
2387 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2388 /* FALLTHRU */
2389
2390 case TRUTH_NOT_EXPR:
2391 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2392 /* FALLTHRU */
2393
2394 case EQ_EXPR: case NE_EXPR:
2395 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2396 /* These expressions always produce boolean results. */
2397 TREE_TYPE (expr) = boolean_type_node;
2398 return expr;
2399
2400 default:
2401 /* Other expressions that get here must have boolean values, but
2402 might need to be converted to the appropriate mode. */
2403 return fold_convert (boolean_type_node, expr);
2404 }
2405 }
2406
2407 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2408 into
2409
2410 if (p) if (p)
2411 t1 = a; a;
2412 else or else
2413 t1 = b; b;
2414 t1;
2415
2416 The second form is used when *EXPR_P is of type void.
2417
2418 TARGET is the tree for T1 above.
2419
2420 PRE_P points to the list where side effects that must happen before
2421 *EXPR_P should be stored. */
2422
2423 static enum gimplify_status
2424 gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
2425 {
2426 tree expr = *expr_p;
2427 tree tmp, tmp2, type;
2428 enum gimplify_status ret;
2429
2430 type = TREE_TYPE (expr);
2431
2432 /* If this COND_EXPR has a value, copy the values into a temporary within
2433 the arms. */
2434 if (! VOID_TYPE_P (type))
2435 {
2436 tree result;
2437
2438 if ((fallback & fb_lvalue) == 0)
2439 {
2440 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2441 ret = GS_ALL_DONE;
2442 }
2443 else
2444 {
2445 tree type = build_pointer_type (TREE_TYPE (expr));
2446
2447 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2448 TREE_OPERAND (expr, 1) =
2449 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2450
2451 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2452 TREE_OPERAND (expr, 2) =
2453 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2454
2455 tmp2 = tmp = create_tmp_var (type, "iftmp");
2456
2457 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2458 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2459
2460 result = build_fold_indirect_ref (tmp);
2461 ret = GS_ALL_DONE;
2462 }
2463
2464 /* Build the then clause, 't1 = a;'. But don't build an assignment
2465 if this branch is void; in C++ it can be, if it's a throw. */
2466 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2467 TREE_OPERAND (expr, 1)
2468 = build2 (MODIFY_EXPR, void_type_node, tmp, TREE_OPERAND (expr, 1));
2469
2470 /* Build the else clause, 't1 = b;'. */
2471 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2472 TREE_OPERAND (expr, 2)
2473 = build2 (MODIFY_EXPR, void_type_node, tmp2, TREE_OPERAND (expr, 2));
2474
2475 TREE_TYPE (expr) = void_type_node;
2476 recalculate_side_effects (expr);
2477
2478 /* Move the COND_EXPR to the prequeue. */
2479 gimplify_and_add (expr, pre_p);
2480
2481 *expr_p = result;
2482 return ret;
2483 }
2484
2485 /* Make sure the condition has BOOLEAN_TYPE. */
2486 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2487
2488 /* Break apart && and || conditions. */
2489 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2490 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2491 {
2492 expr = shortcut_cond_expr (expr);
2493
2494 if (expr != *expr_p)
2495 {
2496 *expr_p = expr;
2497
2498 /* We can't rely on gimplify_expr to re-gimplify the expanded
2499 form properly, as cleanups might cause the target labels to be
2500 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2501 set up a conditional context. */
2502 gimple_push_condition ();
2503 gimplify_stmt (expr_p);
2504 gimple_pop_condition (pre_p);
2505
2506 return GS_ALL_DONE;
2507 }
2508 }
2509
2510 /* Now do the normal gimplification. */
2511 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2512 is_gimple_condexpr, fb_rvalue);
2513
2514 gimple_push_condition ();
2515
2516 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2517 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2518 recalculate_side_effects (expr);
2519
2520 gimple_pop_condition (pre_p);
2521
2522 if (ret == GS_ERROR)
2523 ;
2524 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2525 ret = GS_ALL_DONE;
2526 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2527 /* Rewrite "if (a); else b" to "if (!a) b" */
2528 {
2529 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2530 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2531 is_gimple_condexpr, fb_rvalue);
2532
2533 tmp = TREE_OPERAND (expr, 1);
2534 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2535 TREE_OPERAND (expr, 2) = tmp;
2536 }
2537 else
2538 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2539 expr = TREE_OPERAND (expr, 0);
2540
2541 *expr_p = expr;
2542 return ret;
2543 }
2544
2545 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2546 a call to __builtin_memcpy. */
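
/* Illustrative sketch: for a large aggregate assignment

     *p = *q;   -->   __builtin_memcpy (&*p, &*q, <size>);

   where <size> is the byte count passed in as SIZE.  When the value of
   the assignment itself is wanted, the destination is re-read through
   the call's return value.  */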
2547
2548 static enum gimplify_status
2549 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2550 {
2551 tree args, t, to, to_ptr, from;
2552
2553 to = TREE_OPERAND (*expr_p, 0);
2554 from = TREE_OPERAND (*expr_p, 1);
2555
2556 args = tree_cons (NULL, size, NULL);
2557
2558 t = build_fold_addr_expr (from);
2559 args = tree_cons (NULL, t, args);
2560
2561 to_ptr = build_fold_addr_expr (to);
2562 args = tree_cons (NULL, to_ptr, args);
2563 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2564 t = build_function_call_expr (t, args);
2565
2566 if (want_value)
2567 {
2568 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2569 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2570 }
2571
2572 *expr_p = t;
2573 return GS_OK;
2574 }
2575
2576 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2577 a call to __builtin_memset. In this case we know that the RHS is
2578 a CONSTRUCTOR with an empty element list. */
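
/* Illustrative sketch:

     struct S s = { };   -->   __builtin_memset (&s, 0, <size>);

   with <size> the byte count passed in as SIZE.  */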
2579
2580 static enum gimplify_status
2581 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2582 {
2583 tree args, t, to, to_ptr;
2584
2585 to = TREE_OPERAND (*expr_p, 0);
2586
2587 args = tree_cons (NULL, size, NULL);
2588
2589 args = tree_cons (NULL, integer_zero_node, args);
2590
2591 to_ptr = build_fold_addr_expr (to);
2592 args = tree_cons (NULL, to_ptr, args);
2593 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2594 t = build_function_call_expr (t, args);
2595
2596 if (want_value)
2597 {
2598 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2599 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2600 }
2601
2602 *expr_p = t;
2603 return GS_OK;
2604 }
2605
2606 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2607 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2608 assignment. Returns non-null if we detect a potential overlap. */
2609
2610 struct gimplify_init_ctor_preeval_data
2611 {
2612 /* The base decl of the lhs object. May be NULL, in which case we
2613 have to assume the lhs is indirect. */
2614 tree lhs_base_decl;
2615
2616 /* The alias set of the lhs object. */
2617 int lhs_alias_set;
2618 };
2619
2620 static tree
2621 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2622 {
2623 struct gimplify_init_ctor_preeval_data *data
2624 = (struct gimplify_init_ctor_preeval_data *) xdata;
2625 tree t = *tp;
2626
2627 /* If we find the base object, obviously we have overlap. */
2628 if (data->lhs_base_decl == t)
2629 return t;
2630
2631 /* If the constructor component is indirect, determine if we have a
2632 potential overlap with the lhs. The only bits of information we
2633 have to go on at this point are addressability and alias sets. */
2634 if (TREE_CODE (t) == INDIRECT_REF
2635 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2636 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2637 return t;
2638
2639 /* If the constructor component is a call, determine if it can hide a
2640 potential overlap with the lhs through an INDIRECT_REF like above. */
2641 if (TREE_CODE (t) == CALL_EXPR)
2642 {
2643 tree type, fntype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2644
2645 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
2646 if (POINTER_TYPE_P (TREE_VALUE (type))
2647 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2648 && alias_sets_conflict_p (data->lhs_alias_set,
2649 get_alias_set
2650 (TREE_TYPE (TREE_VALUE (type)))))
2651 return t;
2652 }
2653
2654 if (IS_TYPE_OR_DECL_P (t))
2655 *walk_subtrees = 0;
2656 return NULL;
2657 }
2658
2659 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2660 force values that overlap with the lhs (as described by *DATA)
2661 into temporaries. */
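
/* Illustratively, for 'a = (struct S) { .x = g (&a), .y = a.f }' the
   element values that might read from A are forced into temporaries
   before any part of A is stored, so that a preceding block clear or an
   earlier element store cannot clobber them.  */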
2662
2663 static void
2664 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2665 struct gimplify_init_ctor_preeval_data *data)
2666 {
2667 enum gimplify_status one;
2668
2669 /* If the value is invariant, then there's nothing to pre-evaluate.
2670 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2671 invariant but has side effects and might contain a reference to
2672 the object we're initializing. */
2673 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2674 return;
2675
2676 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2677 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2678 return;
2679
2680 /* Recurse for nested constructors. */
2681 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2682 {
2683 unsigned HOST_WIDE_INT ix;
2684 constructor_elt *ce;
2685 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2686
2687 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2688 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2689 return;
2690 }
2691
2692 /* If this is a variable sized type, we must remember the size. */
2693 maybe_with_size_expr (expr_p);
2694
2695 /* Gimplify the constructor element to something appropriate for the rhs
2696 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2697 the gimplifier will consider this a store to memory. Doing this
2698 gimplification now means that we won't have to deal with complicated
2699 language-specific trees, nor trees like SAVE_EXPR that can induce
2700 exponential search behavior. */
2701 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2702 if (one == GS_ERROR)
2703 {
2704 *expr_p = NULL;
2705 return;
2706 }
2707
2708 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2709 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2710 always be true for all scalars, since is_gimple_mem_rhs insists on a
2711 temporary variable for them. */
2712 if (DECL_P (*expr_p))
2713 return;
2714
2715 /* If this is of variable size, we have no choice but to assume it doesn't
2716 overlap since we can't make a temporary for it. */
2717 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
2718 return;
2719
2720 /* Otherwise, we must search for overlap ... */
2721 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2722 return;
2723
2724 /* ... and if found, force the value into a temporary. */
2725 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2726 }
2727
2728 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2729 a RANGE_EXPR in a CONSTRUCTOR for an array.
2730
2731 var = lower;
2732 loop_entry:
2733 object[var] = value;
2734 if (var == upper)
2735 goto loop_exit;
2736 var = var + 1;
2737 goto loop_entry;
2738 loop_exit:
2739
2740 We increment var _after_ the loop exit check because we might otherwise
2741 fail if upper == TYPE_MAX_VALUE (type for upper).
2742
2743 Note that we never have to deal with SAVE_EXPRs here, because this has
2744 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2745
2746 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2747 tree *, bool);
2748
2749 static void
2750 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2751 tree value, tree array_elt_type,
2752 tree *pre_p, bool cleared)
2753 {
2754 tree loop_entry_label, loop_exit_label;
2755 tree var, var_type, cref;
2756
2757 loop_entry_label = create_artificial_label ();
2758 loop_exit_label = create_artificial_label ();
2759
2760 /* Create and initialize the index variable. */
2761 var_type = TREE_TYPE (upper);
2762 var = create_tmp_var (var_type, NULL);
2763 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var, lower), pre_p);
2764
2765 /* Add the loop entry label. */
2766 append_to_statement_list (build1 (LABEL_EXPR,
2767 void_type_node,
2768 loop_entry_label),
2769 pre_p);
2770
2771 /* Build the reference. */
2772 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2773 var, NULL_TREE, NULL_TREE);
2774
2775 /* If the value is itself a CONSTRUCTOR, call gimplify_init_ctor_eval to do
2776 the store. Otherwise just assign the value to the reference. */
2777
2778 if (TREE_CODE (value) == CONSTRUCTOR)
2779 /* NB we might have to call ourself recursively through
2780 gimplify_init_ctor_eval if the value is a constructor. */
2781 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2782 pre_p, cleared);
2783 else
2784 append_to_statement_list (build2 (MODIFY_EXPR, TREE_TYPE (cref),
2785 cref, value),
2786 pre_p);
2787
2788 /* We exit the loop when the index var is equal to the upper bound. */
2789 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2790 build2 (EQ_EXPR, boolean_type_node,
2791 var, upper),
2792 build1 (GOTO_EXPR,
2793 void_type_node,
2794 loop_exit_label),
2795 NULL_TREE),
2796 pre_p);
2797
2798 /* Otherwise, increment the index var... */
2799 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var,
2800 build2 (PLUS_EXPR, var_type, var,
2801 fold_convert (var_type,
2802 integer_one_node))),
2803 pre_p);
2804
2805 /* ...and jump back to the loop entry. */
2806 append_to_statement_list (build1 (GOTO_EXPR,
2807 void_type_node,
2808 loop_entry_label),
2809 pre_p);
2810
2811 /* Add the loop exit label. */
2812 append_to_statement_list (build1 (LABEL_EXPR,
2813 void_type_node,
2814 loop_exit_label),
2815 pre_p);
2816 }
2817
2818 /* Return true if FDECL is a FIELD_DECL of zero size. */
2819
2820 static bool
2821 zero_sized_field_decl (tree fdecl)
2822 {
2823 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
2824 && integer_zerop (DECL_SIZE (fdecl)))
2825 return true;
2826 return false;
2827 }
2828
2829 /* Return true if TYPE is zero sized. */
2830
2831 static bool
2832 zero_sized_type (tree type)
2833 {
2834 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
2835 && integer_zerop (TYPE_SIZE (type)))
2836 return true;
2837 return false;
2838 }
2839
2840 /* A subroutine of gimplify_init_constructor. Generate individual
2841 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
2842 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
2843 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
2844 zeroed first. */
2845
2846 static void
2847 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
2848 tree *pre_p, bool cleared)
2849 {
2850 tree array_elt_type = NULL;
2851 unsigned HOST_WIDE_INT ix;
2852 tree purpose, value;
2853
2854 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
2855 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
2856
2857 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
2858 {
2859 tree cref, init;
2860
2861 /* NULL values are created above for gimplification errors. */
2862 if (value == NULL)
2863 continue;
2864
2865 if (cleared && initializer_zerop (value))
2866 continue;
2867
2868 /* ??? Here's to hoping the front end fills in all of the indices,
2869 so we don't have to figure out what's missing ourselves. */
2870 gcc_assert (purpose);
2871
2872 /* Skip zero-sized fields, unless value has side-effects. This can
2873 happen with calls to functions returning a zero-sized type, which
2874 we shouldn't discard. As a number of downstream passes don't
2875 expect sets of zero-sized fields, we rely on the gimplification of
2876 the MODIFY_EXPR we make below to drop the assignment statement. */
2877 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
2878 continue;
2879
2880 /* If we have a RANGE_EXPR, we have to build a loop to assign the
2881 whole range. */
2882 if (TREE_CODE (purpose) == RANGE_EXPR)
2883 {
2884 tree lower = TREE_OPERAND (purpose, 0);
2885 tree upper = TREE_OPERAND (purpose, 1);
2886
2887 /* If the lower bound is equal to upper, just treat it as if
2888 upper was the index. */
2889 if (simple_cst_equal (lower, upper))
2890 purpose = upper;
2891 else
2892 {
2893 gimplify_init_ctor_eval_range (object, lower, upper, value,
2894 array_elt_type, pre_p, cleared);
2895 continue;
2896 }
2897 }
2898
2899 if (array_elt_type)
2900 {
2901 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2902 purpose, NULL_TREE, NULL_TREE);
2903 }
2904 else
2905 {
2906 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
2907 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
2908 unshare_expr (object), purpose, NULL_TREE);
2909 }
2910
2911 if (TREE_CODE (value) == CONSTRUCTOR
2912 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
2913 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2914 pre_p, cleared);
2915 else
2916 {
2917 init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
2918 gimplify_and_add (init, pre_p);
2919 }
2920 }
2921 }
2922
2923 /* A subroutine of gimplify_modify_expr. Break out elements of a
2924 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
2925
2926 Note that we still need to clear any elements that don't have explicit
2927 initializers, so if not all elements are initialized we keep the
2928 original MODIFY_EXPR, we just remove all of the constructor elements. */
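
/* For example (illustrative), an initializer such as

     struct S s = { .a = f (), .b = 0 };

   may first be lowered to a block clear of s followed by the single
   statement 's.a = f ()', keeping explicit stores only for the
   elements that need code at run time.  */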
2929
2930 static enum gimplify_status
2931 gimplify_init_constructor (tree *expr_p, tree *pre_p,
2932 tree *post_p, bool want_value)
2933 {
2934 tree object;
2935 tree ctor = TREE_OPERAND (*expr_p, 1);
2936 tree type = TREE_TYPE (ctor);
2937 enum gimplify_status ret;
2938 VEC(constructor_elt,gc) *elts;
2939
2940 if (TREE_CODE (ctor) != CONSTRUCTOR)
2941 return GS_UNHANDLED;
2942
2943 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
2944 is_gimple_lvalue, fb_lvalue);
2945 if (ret == GS_ERROR)
2946 return ret;
2947 object = TREE_OPERAND (*expr_p, 0);
2948
2949 elts = CONSTRUCTOR_ELTS (ctor);
2950
2951 ret = GS_ALL_DONE;
2952 switch (TREE_CODE (type))
2953 {
2954 case RECORD_TYPE:
2955 case UNION_TYPE:
2956 case QUAL_UNION_TYPE:
2957 case ARRAY_TYPE:
2958 {
2959 struct gimplify_init_ctor_preeval_data preeval_data;
2960 HOST_WIDE_INT num_type_elements, num_ctor_elements;
2961 HOST_WIDE_INT num_nonzero_elements;
2962 bool cleared, valid_const_initializer;
2963
2964 /* Aggregate types must lower constructors to initialization of
2965 individual elements. The exception is that a CONSTRUCTOR node
2966 with no elements indicates zero-initialization of the whole. */
2967 if (VEC_empty (constructor_elt, elts))
2968 break;
2969
2970 /* Fetch information about the constructor to direct later processing.
2971 We might want to make static versions of it in various cases, and
2972 can only do so if it is known to be a valid constant initializer. */
2973 valid_const_initializer
2974 = categorize_ctor_elements (ctor, &num_nonzero_elements,
2975 &num_ctor_elements, &cleared);
2976
2977 /* If a const aggregate variable is being initialized, then it
2978 should never be a loss to promote the variable to be static. */
2979 if (valid_const_initializer
2980 && num_nonzero_elements > 1
2981 && TREE_READONLY (object)
2982 && TREE_CODE (object) == VAR_DECL)
2983 {
2984 DECL_INITIAL (object) = ctor;
2985 TREE_STATIC (object) = 1;
2986 if (!DECL_NAME (object))
2987 DECL_NAME (object) = create_tmp_var_name ("C");
2988 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
2989
2990 /* ??? C++ doesn't automatically append a .<number> to the
2991 assembler name, and even when it does, it looks at FE private
2992 data structures to figure out what that number should be,
2993 which are not set for this variable. I suppose this is
2994 important for local statics for inline functions, which aren't
2995 "local" in the object file sense. So in order to get a unique
2996 TU-local symbol, we must invoke the lhd version now. */
2997 lhd_set_decl_assembler_name (object);
2998
2999 *expr_p = NULL_TREE;
3000 break;
3001 }
3002
3003 /* If there are "lots" of initialized elements, even discounting
3004 those that are not address constants (and thus *must* be
3005 computed at runtime), then partition the constructor into
3006 constant and non-constant parts. Block copy the constant
3007 parts in, then generate code for the non-constant parts. */
3008 /* TODO. There's code in cp/typeck.c to do this. */
3009
3010 num_type_elements = count_type_elements (type, true);
3011
3012 /* If count_type_elements could not determine number of type elements
3013 for a constant-sized object, assume clearing is needed.
3014 Don't do this for variable-sized objects, as store_constructor
3015 will ignore the clearing of variable-sized objects. */
3016 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3017 cleared = true;
3018 /* If there are "lots" of zeros, then block clear the object first. */
3019 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
3020 && num_nonzero_elements < num_type_elements/4)
3021 cleared = true;
3022 /* ??? This bit ought not be needed. For any element not present
3023 in the initializer, we should simply set it to zero. Except
3024 we'd need to *find* the elements that are not present, and that
3025 requires trickery to avoid quadratic compile-time behavior in
3026 large cases or excessive memory use in small cases. */
3027 else if (num_ctor_elements < num_type_elements)
3028 cleared = true;
3029
3030 /* If there are "lots" of initialized elements, and all of them
3031 are valid address constants, then the entire initializer can
3032 be dropped to memory, and then memcpy'd out. Don't do this
3033 for sparse arrays, though, as it's more efficient to follow
3034 the standard CONSTRUCTOR behavior of memset followed by
3035 individual element initialization. */
3036 if (valid_const_initializer && !cleared)
3037 {
3038 HOST_WIDE_INT size = int_size_in_bytes (type);
3039 unsigned int align;
3040
3041 /* ??? We can still get unbounded array types, at least
3042 from the C++ front end. This seems wrong, but attempt
3043 to work around it for now. */
3044 if (size < 0)
3045 {
3046 size = int_size_in_bytes (TREE_TYPE (object));
3047 if (size >= 0)
3048 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3049 }
3050
3051 /* Find the maximum alignment we can assume for the object. */
3052 /* ??? Make use of DECL_OFFSET_ALIGN. */
3053 if (DECL_P (object))
3054 align = DECL_ALIGN (object);
3055 else
3056 align = TYPE_ALIGN (type);
3057
3058 if (size > 0 && !can_move_by_pieces (size, align))
3059 {
3060 tree new = create_tmp_var_raw (type, "C");
3061
3062 gimple_add_tmp_var (new);
3063 TREE_STATIC (new) = 1;
3064 TREE_READONLY (new) = 1;
3065 DECL_INITIAL (new) = ctor;
3066 if (align > DECL_ALIGN (new))
3067 {
3068 DECL_ALIGN (new) = align;
3069 DECL_USER_ALIGN (new) = 1;
3070 }
3071 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
3072
3073 TREE_OPERAND (*expr_p, 1) = new;
3074
3075 /* This is no longer an assignment of a CONSTRUCTOR, but
3076 we still may have processing to do on the LHS. So
3077 pretend we didn't do anything here to let that happen. */
3078 return GS_UNHANDLED;
3079 }
3080 }
3081
3082 /* If there are nonzero elements, pre-evaluate to capture elements
3083 overlapping with the lhs into temporaries. We must do this before
3084 clearing to fetch the values before they are zeroed-out. */
3085 if (num_nonzero_elements > 0)
3086 {
3087 preeval_data.lhs_base_decl = get_base_address (object);
3088 if (!DECL_P (preeval_data.lhs_base_decl))
3089 preeval_data.lhs_base_decl = NULL;
3090 preeval_data.lhs_alias_set = get_alias_set (object);
3091
3092 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3093 pre_p, post_p, &preeval_data);
3094 }
3095
3096 if (cleared)
3097 {
3098 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3099 Note that we still have to gimplify, in order to handle the
3100 case of variable sized types. Avoid shared tree structures. */
3101 CONSTRUCTOR_ELTS (ctor) = NULL;
3102 object = unshare_expr (object);
3103 gimplify_stmt (expr_p);
3104 append_to_statement_list (*expr_p, pre_p);
3105 }
3106
3107 /* If we have not block cleared the object, or if there are nonzero
3108 elements in the constructor, add assignments to the individual
3109 scalar fields of the object. */
3110 if (!cleared || num_nonzero_elements > 0)
3111 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3112
3113 *expr_p = NULL_TREE;
3114 }
3115 break;
3116
3117 case COMPLEX_TYPE:
3118 {
3119 tree r, i;
3120
3121 /* Extract the real and imaginary parts out of the ctor. */
3122 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3123 r = VEC_index (constructor_elt, elts, 0)->value;
3124 i = VEC_index (constructor_elt, elts, 1)->value;
3125 if (r == NULL || i == NULL)
3126 {
3127 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3128 if (r == NULL)
3129 r = zero;
3130 if (i == NULL)
3131 i = zero;
3132 }
3133
3134 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3135 represent creation of a complex value. */
3136 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3137 {
3138 ctor = build_complex (type, r, i);
3139 TREE_OPERAND (*expr_p, 1) = ctor;
3140 }
3141 else
3142 {
3143 ctor = build2 (COMPLEX_EXPR, type, r, i);
3144 TREE_OPERAND (*expr_p, 1) = ctor;
3145 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3146 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3147 fb_rvalue);
3148 }
3149 }
3150 break;
3151
3152 case VECTOR_TYPE:
3153 {
3154 unsigned HOST_WIDE_INT ix;
3155 constructor_elt *ce;
3156
3157 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3158 if (TREE_CONSTANT (ctor))
3159 {
3160 bool constant_p = true;
3161 tree value;
3162
3163 /* Even when ctor is constant, it might contain non-*_CST
3164 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
3165 belong into VECTOR_CST nodes. */
3166 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3167 if (!CONSTANT_CLASS_P (value))
3168 {
3169 constant_p = false;
3170 break;
3171 }
3172
3173 if (constant_p)
3174 {
3175 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3176 break;
3177 }
3178
3179 /* Don't reduce a TREE_CONSTANT vector ctor even if we can't
3180 make a VECTOR_CST. It won't do anything for us, and it'll
3181 prevent us from representing it as a single constant. */
3182 break;
3183 }
3184
3185 /* Vector types use CONSTRUCTOR all the way through gimple
3186 compilation as a general initializer. */
3187 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3188 {
3189 enum gimplify_status tret;
3190 tret = gimplify_expr (&ce->value, pre_p, post_p,
3191 is_gimple_val, fb_rvalue);
3192 if (tret == GS_ERROR)
3193 ret = GS_ERROR;
3194 }
3195 }
3196 break;
3197
3198 default:
3199 /* So how did we get a CONSTRUCTOR for a scalar type? */
3200 gcc_unreachable ();
3201 }
3202
3203 if (ret == GS_ERROR)
3204 return GS_ERROR;
3205 else if (want_value)
3206 {
3207 append_to_statement_list (*expr_p, pre_p);
3208 *expr_p = object;
3209 return GS_OK;
3210 }
3211 else
3212 return GS_ALL_DONE;
3213 }
3214
3215 /* Given a pointer value OP0, return a simplified version of an
3216 indirection through OP0, or NULL_TREE if no simplification is
3217 possible. This may only be applied to the rhs of an expression.
3218 Note that the resulting type may differ from the type pointed
3219 to, as long as the two are still compatible from the langhooks
3220 point of view. */
3221
3222 static tree
3223 fold_indirect_ref_rhs (tree t)
3224 {
3225 tree type = TREE_TYPE (TREE_TYPE (t));
3226 tree sub = t;
3227 tree subtype;
3228
3229 STRIP_USELESS_TYPE_CONVERSION (sub);
3230 subtype = TREE_TYPE (sub);
3231 if (!POINTER_TYPE_P (subtype))
3232 return NULL_TREE;
3233
3234 if (TREE_CODE (sub) == ADDR_EXPR)
3235 {
3236 tree op = TREE_OPERAND (sub, 0);
3237 tree optype = TREE_TYPE (op);
3238 /* *&p => p */
3239 if (lang_hooks.types_compatible_p (type, optype))
3240 return op;
3241 /* *(foo *)&fooarray => fooarray[0] */
3242 else if (TREE_CODE (optype) == ARRAY_TYPE
3243 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
3244 {
3245 tree type_domain = TYPE_DOMAIN (optype);
3246 tree min_val = size_zero_node;
3247 if (type_domain && TYPE_MIN_VALUE (type_domain))
3248 min_val = TYPE_MIN_VALUE (type_domain);
3249 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3250 }
3251 }
3252
3253 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3254 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3255 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3256 {
3257 tree type_domain;
3258 tree min_val = size_zero_node;
3259 tree osub = sub;
3260 sub = fold_indirect_ref_rhs (sub);
3261 if (! sub)
3262 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3263 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3264 if (type_domain && TYPE_MIN_VALUE (type_domain))
3265 min_val = TYPE_MIN_VALUE (type_domain);
3266 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3267 }
3268
3269 return NULL_TREE;
3270 }
3271
3272 /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs
3273 based on the code of the RHS. We loop for as long as something changes. */
3274
3275 static enum gimplify_status
3276 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3277 tree *post_p, bool want_value)
3278 {
3279 enum gimplify_status ret = GS_OK;
3280
3281 while (ret != GS_UNHANDLED)
3282 switch (TREE_CODE (*from_p))
3283 {
3284 case INDIRECT_REF:
3285 {
3286 /* If we have code like
3287
3288 *(const A*)(A*)&x
3289
3290 where the type of "x" is a (possibly cv-qualified) variant
3291 of "A", treat the entire expression as identical to "x".
3292 This kind of code arises in C++ when an object is bound
3293 to a const reference, and if "x" is a TARGET_EXPR we want
3294 to take advantage of the optimization below. */
3295 tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3296 if (t)
3297 {
3298 *from_p = t;
3299 ret = GS_OK;
3300 }
3301 else
3302 ret = GS_UNHANDLED;
3303 break;
3304 }
3305
3306 case TARGET_EXPR:
3307 {
3308 /* If we are initializing something from a TARGET_EXPR, strip the
3309 TARGET_EXPR and initialize it directly, if possible. This can't
3310 be done if the initializer is void, since that implies that the
3311 temporary is set in some non-trivial way.
3312
3313 ??? What about code that pulls out the temp and uses it
3314 elsewhere? I think that such code never uses the TARGET_EXPR as
3315 an initializer. If I'm wrong, we'll die because the temp won't
3316 have any RTL. In that case, I guess we'll need to replace
3317 references somehow. */
3318 tree init = TARGET_EXPR_INITIAL (*from_p);
3319
3320 if (!VOID_TYPE_P (TREE_TYPE (init)))
3321 {
3322 *from_p = init;
3323 ret = GS_OK;
3324 }
3325 else
3326 ret = GS_UNHANDLED;
3327 }
3328 break;
3329
3330 case COMPOUND_EXPR:
3331 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3332 caught. */
3333 gimplify_compound_expr (from_p, pre_p, true);
3334 ret = GS_OK;
3335 break;
3336
3337 case CONSTRUCTOR:
3338 /* If we're initializing from a CONSTRUCTOR, break this into
3339 individual MODIFY_EXPRs. */
3340 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value);
3341
3342 case COND_EXPR:
3343 /* If we're assigning to a non-register type, push the assignment
3344 down into the branches. This is mandatory for ADDRESSABLE types,
3345 since we cannot generate temporaries for such, but it saves a
3346 copy in other cases as well. */
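      /* Illustratively, 'x = b ? y : z' for a non-register type becomes

           if (b) x = y; else x = z;

         so no aggregate temporary is needed for the selected arm.  */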
3347 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3348 {
3349 /* This code should mirror the code in gimplify_cond_expr. */
3350 enum tree_code code = TREE_CODE (*expr_p);
3351 tree cond = *from_p;
3352 tree result = *to_p;
3353
3354 ret = gimplify_expr (&result, pre_p, post_p,
3355 is_gimple_min_lval, fb_lvalue);
3356 if (ret != GS_ERROR)
3357 ret = GS_OK;
3358
3359 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3360 TREE_OPERAND (cond, 1)
3361 = build2 (code, void_type_node, result,
3362 TREE_OPERAND (cond, 1));
3363 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3364 TREE_OPERAND (cond, 2)
3365 = build2 (code, void_type_node, unshare_expr (result),
3366 TREE_OPERAND (cond, 2));
3367
3368 TREE_TYPE (cond) = void_type_node;
3369 recalculate_side_effects (cond);
3370
3371 if (want_value)
3372 {
3373 gimplify_and_add (cond, pre_p);
3374 *expr_p = unshare_expr (result);
3375 }
3376 else
3377 *expr_p = cond;
3378 return ret;
3379 }
3380 else
3381 ret = GS_UNHANDLED;
3382 break;
3383
3384 case CALL_EXPR:
3385 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3386 return slot so that we don't generate a temporary. */
3387 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3388 && aggregate_value_p (*from_p, *from_p))
3389 {
3390 bool use_target;
3391
3392 if (!(rhs_predicate_for (*to_p))(*from_p))
3393 /* If we need a temporary, *to_p isn't accurate. */
3394 use_target = false;
3395 else if (TREE_CODE (*to_p) == RESULT_DECL
3396 && DECL_NAME (*to_p) == NULL_TREE
3397 && needs_to_live_in_memory (*to_p))
3398 /* It's OK to use the return slot directly unless it's an NRV. */
3399 use_target = true;
3400 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
3401 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
3402 /* Don't force regs into memory. */
3403 use_target = false;
3404 else if (TREE_CODE (*to_p) == VAR_DECL
3405 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3406 /* Don't use the original target if it's a formal temp; we
3407 don't want to take its address. */
3408 use_target = false;
3409 else if (TREE_CODE (*expr_p) == INIT_EXPR)
3410 /* It's OK to use the target directly if it's being
3411 initialized. */
3412 use_target = true;
3413 else if (!is_gimple_non_addressable (*to_p))
3414 /* Don't use the original target if it's already addressable;
3415 if its address escapes, and the called function uses the
3416 NRV optimization, a conforming program could see *to_p
3417 change before the called function returns; see c++/19317.
3418 When optimizing, the return_slot pass marks more functions
3419 as safe after we have escape info. */
3420 use_target = false;
3421 else
3422 use_target = true;
3423
3424 if (use_target)
3425 {
3426 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3427 lang_hooks.mark_addressable (*to_p);
3428 }
3429 }
3430
3431 ret = GS_UNHANDLED;
3432 break;
3433
3434 /* If we're initializing from a container, push the initialization
3435 inside it. */
3436 case CLEANUP_POINT_EXPR:
3437 case BIND_EXPR:
3438 case STATEMENT_LIST:
3439 {
3440 tree wrap = *from_p;
3441 tree t;
3442
3443 ret = gimplify_expr (to_p, pre_p, post_p,
3444 is_gimple_min_lval, fb_lvalue);
3445 if (ret != GS_ERROR)
3446 ret = GS_OK;
3447
3448 t = voidify_wrapper_expr (wrap, *expr_p);
3449 gcc_assert (t == *expr_p);
3450
3451 if (want_value)
3452 {
3453 gimplify_and_add (wrap, pre_p);
3454 *expr_p = unshare_expr (*to_p);
3455 }
3456 else
3457 *expr_p = wrap;
3458 return GS_OK;
3459 }
3460
3461 default:
3462 ret = GS_UNHANDLED;
3463 break;
3464 }
3465
3466 return ret;
3467 }
3468
3469 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3470 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3471 DECL_COMPLEX_GIMPLE_REG_P set. */
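
/* For example (illustrative), when Z is such a variable,

     __real z = x;   -->   t = __imag z;
                           z = COMPLEX_EXPR <x, t>;

   so Z is always written as a whole and can be kept in registers.  */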
3472
3473 static enum gimplify_status
3474 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3475 {
3476 enum tree_code code, ocode;
3477 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3478
3479 lhs = TREE_OPERAND (*expr_p, 0);
3480 rhs = TREE_OPERAND (*expr_p, 1);
3481 code = TREE_CODE (lhs);
3482 lhs = TREE_OPERAND (lhs, 0);
3483
3484 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3485 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3486 other = get_formal_tmp_var (other, pre_p);
3487
3488 realpart = code == REALPART_EXPR ? rhs : other;
3489 imagpart = code == REALPART_EXPR ? other : rhs;
3490
3491 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3492 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3493 else
3494 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3495
3496 TREE_OPERAND (*expr_p, 0) = lhs;
3497 TREE_OPERAND (*expr_p, 1) = new_rhs;
3498
3499 if (want_value)
3500 {
3501 append_to_statement_list (*expr_p, pre_p);
3502 *expr_p = rhs;
3503 }
3504
3505 return GS_ALL_DONE;
3506 }
3507
3508 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3509
3510 modify_expr
3511 : varname '=' rhs
3512 | '*' ID '=' rhs
3513
3514 PRE_P points to the list where side effects that must happen before
3515 *EXPR_P should be stored.
3516
3517 POST_P points to the list where side effects that must happen after
3518 *EXPR_P should be stored.
3519
3520 WANT_VALUE is nonzero iff we want to use the value of this expression
3521 in another expression. */
3522
3523 static enum gimplify_status
3524 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3525 {
3526 tree *from_p = &TREE_OPERAND (*expr_p, 1);
3527 tree *to_p = &TREE_OPERAND (*expr_p, 0);
3528 enum gimplify_status ret = GS_UNHANDLED;
3529
3530 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3531 || TREE_CODE (*expr_p) == INIT_EXPR);
3532
3533 /* See if any simplifications can be done based on what the RHS is. */
3534 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3535 want_value);
3536 if (ret != GS_UNHANDLED)
3537 return ret;
3538
3539 /* For zero sized types only gimplify the left hand side and right hand
3540 side as statements and throw away the assignment. Do this after
3541 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
3542 types properly. */
3543 if (zero_sized_type (TREE_TYPE (*from_p)))
3544 {
3545 gimplify_stmt (from_p);
3546 gimplify_stmt (to_p);
3547 append_to_statement_list (*from_p, pre_p);
3548 append_to_statement_list (*to_p, pre_p);
3549 *expr_p = NULL_TREE;
3550 return GS_ALL_DONE;
3551 }
3552
3553 /* If the value being copied is of variable width, compute the length
3554 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3555 before gimplifying any of the operands so that we can resolve any
3556 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3557 the size of the expression to be copied, not of the destination, so
3558      that is what we must use here.  */
3559 maybe_with_size_expr (from_p);
3560
3561 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3562 if (ret == GS_ERROR)
3563 return ret;
3564
3565 ret = gimplify_expr (from_p, pre_p, post_p,
3566 rhs_predicate_for (*to_p), fb_rvalue);
3567 if (ret == GS_ERROR)
3568 return ret;
3569
3570 /* Now see if the above changed *from_p to something we handle specially. */
3571 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3572 want_value);
3573 if (ret != GS_UNHANDLED)
3574 return ret;
3575
3576   /* If we've got a variable sized assignment between two lvalues (i.e. one
3577      that does not involve a call), then we can make things a bit more
3578      straightforward by converting the assignment to memcpy or memset.  */
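     /* E.g. an assignment "a = b" between two objects of variable-sized
	type becomes (approximately) __builtin_memcpy (&a, &b, <size of b>),
	and an assignment from an empty CONSTRUCTOR (zero-initialization)
	becomes a __builtin_memset of the destination.  */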
3579 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3580 {
3581 tree from = TREE_OPERAND (*from_p, 0);
3582 tree size = TREE_OPERAND (*from_p, 1);
3583
3584 if (TREE_CODE (from) == CONSTRUCTOR)
3585 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3586 if (is_gimple_addressable (from))
3587 {
3588 *from_p = from;
3589 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3590 }
3591 }
3592
3593 /* Transform partial stores to non-addressable complex variables into
3594 total stores. This allows us to use real instead of virtual operands
3595 for these variables, which improves optimization. */
3596 if ((TREE_CODE (*to_p) == REALPART_EXPR
3597 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3598 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3599 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3600
3601 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3602 {
3603 /* If we've somehow already got an SSA_NAME on the LHS, then
3604 	 we've probably modified it twice.  Not good.  */
3605 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3606 *to_p = make_ssa_name (*to_p, *expr_p);
3607 }
3608
3609 if (want_value)
3610 {
3611 append_to_statement_list (*expr_p, pre_p);
3612 *expr_p = *to_p;
3613 return GS_OK;
3614 }
3615
3616 return GS_ALL_DONE;
3617 }
3618
3619 /* Gimplify a comparison between two variable-sized objects. Do this
3620 with a call to BUILT_IN_MEMCMP. */
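/* E.g. with A and B two objects of the same variable-sized type, "A == B"
   is rewritten (approximately) as

     __builtin_memcmp (&A, &B, <size of A>) == 0.  */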
3621
3622 static enum gimplify_status
3623 gimplify_variable_sized_compare (tree *expr_p)
3624 {
3625 tree op0 = TREE_OPERAND (*expr_p, 0);
3626 tree op1 = TREE_OPERAND (*expr_p, 1);
3627 tree args, t, dest;
3628
3629 t = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3630 t = unshare_expr (t);
3631 t = SUBSTITUTE_PLACEHOLDER_IN_EXPR (t, op0);
3632 args = tree_cons (NULL, t, NULL);
3633 t = build_fold_addr_expr (op1);
3634 args = tree_cons (NULL, t, args);
3635 dest = build_fold_addr_expr (op0);
3636 args = tree_cons (NULL, dest, args);
3637 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3638 t = build_function_call_expr (t, args);
3639 *expr_p
3640 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3641
3642 return GS_OK;
3643 }
3644
3645 /* Gimplify a comparison between two aggregate objects of integral scalar
3646 mode as a comparison between the bitwise equivalent scalar values. */
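/* E.g. assuming "struct S { short a, b; }" has a 4-byte integer mode,
   a comparison "x == y" of two struct S objects is rewritten
   (approximately) as

     VIEW_CONVERT_EXPR <unsigned int> (x) == VIEW_CONVERT_EXPR <unsigned int> (y).  */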
3647
3648 static enum gimplify_status
3649 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
3650 {
3651 tree op0 = TREE_OPERAND (*expr_p, 0);
3652 tree op1 = TREE_OPERAND (*expr_p, 1);
3653
3654 tree type = TREE_TYPE (op0);
3655 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
3656
3657 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
3658 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
3659
3660 *expr_p
3661 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
3662
3663 return GS_OK;
3664 }
3665
3666 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
3667 points to the expression to gimplify.
3668
3669 Expressions of the form 'a && b' are gimplified to:
3670
3671 a && b ? true : false
3672
3673 gimplify_cond_expr will do the rest.
3674
3675 PRE_P points to the list where side effects that must happen before
3676 *EXPR_P should be stored. */
3677
3678 static enum gimplify_status
3679 gimplify_boolean_expr (tree *expr_p)
3680 {
3681 /* Preserve the original type of the expression. */
3682 tree type = TREE_TYPE (*expr_p);
3683
3684 *expr_p = build3 (COND_EXPR, type, *expr_p,
3685 fold_convert (type, boolean_true_node),
3686 fold_convert (type, boolean_false_node));
3687
3688 return GS_OK;
3689 }
3690
3691 /* Gimplifies an expression sequence. This function gimplifies each
3692 expression and re-writes the original expression with the last
3693 expression of the sequence in GIMPLE form.
3694
3695 PRE_P points to the list where the side effects for all the
3696 expressions in the sequence will be emitted.
3697
3698 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
3699 /* ??? Should rearrange to share the pre-queue with all the indirect
3700 invocations of gimplify_expr. Would probably save on creations
3701 of statement_list nodes. */
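/* E.g. for the C expression "(f (), g (), x)", the calls to f and g are
   emitted to PRE_P and *EXPR_P is replaced with "x".  */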
3702
3703 static enum gimplify_status
3704 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
3705 {
3706 tree t = *expr_p;
3707
3708 do
3709 {
3710 tree *sub_p = &TREE_OPERAND (t, 0);
3711
3712 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
3713 gimplify_compound_expr (sub_p, pre_p, false);
3714 else
3715 gimplify_stmt (sub_p);
3716 append_to_statement_list (*sub_p, pre_p);
3717
3718 t = TREE_OPERAND (t, 1);
3719 }
3720 while (TREE_CODE (t) == COMPOUND_EXPR);
3721
3722 *expr_p = t;
3723 if (want_value)
3724 return GS_OK;
3725 else
3726 {
3727 gimplify_stmt (expr_p);
3728 return GS_ALL_DONE;
3729 }
3730 }
3731
3732 /* Gimplifies a statement list. These may be created either by an
3733 enlightened front-end, or by shortcut_cond_expr. */
3734
3735 static enum gimplify_status
3736 gimplify_statement_list (tree *expr_p, tree *pre_p)
3737 {
3738 tree temp = voidify_wrapper_expr (*expr_p, NULL);
3739
3740 tree_stmt_iterator i = tsi_start (*expr_p);
3741
3742 while (!tsi_end_p (i))
3743 {
3744 tree t;
3745
3746 gimplify_stmt (tsi_stmt_ptr (i));
3747
3748 t = tsi_stmt (i);
3749 if (t == NULL)
3750 tsi_delink (&i);
3751 else if (TREE_CODE (t) == STATEMENT_LIST)
3752 {
3753 tsi_link_before (&i, t, TSI_SAME_STMT);
3754 tsi_delink (&i);
3755 }
3756 else
3757 tsi_next (&i);
3758 }
3759
3760 if (temp)
3761 {
3762 append_to_statement_list (*expr_p, pre_p);
3763 *expr_p = temp;
3764 return GS_OK;
3765 }
3766
3767 return GS_ALL_DONE;
3768 }
3769
3770 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
3771 gimplify. After gimplification, EXPR_P will point to a new temporary
3772 that holds the original value of the SAVE_EXPR node.
3773
3774 PRE_P points to the list where side effects that must happen before
3775 *EXPR_P should be stored. */
3776
3777 static enum gimplify_status
3778 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
3779 {
3780 enum gimplify_status ret = GS_ALL_DONE;
3781 tree val;
3782
3783 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
3784 val = TREE_OPERAND (*expr_p, 0);
3785
3786 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
3787 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
3788 {
3789 /* The operand may be a void-valued expression such as SAVE_EXPRs
3790 generated by the Java frontend for class initialization. It is
3791 being executed only for its side-effects. */
3792 if (TREE_TYPE (val) == void_type_node)
3793 {
3794 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3795 is_gimple_stmt, fb_none);
3796 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
3797 val = NULL;
3798 }
3799 else
3800 val = get_initialized_tmp_var (val, pre_p, post_p);
3801
3802 TREE_OPERAND (*expr_p, 0) = val;
3803 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
3804 }
3805
3806 *expr_p = val;
3807
3808 return ret;
3809 }
3810
3811 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
3812
3813 unary_expr
3814 : ...
3815 | '&' varname
3816 ...
3817
3818 PRE_P points to the list where side effects that must happen before
3819 *EXPR_P should be stored.
3820
3821 POST_P points to the list where side effects that must happen after
3822 *EXPR_P should be stored. */
3823
3824 static enum gimplify_status
3825 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
3826 {
3827 tree expr = *expr_p;
3828 tree op0 = TREE_OPERAND (expr, 0);
3829 enum gimplify_status ret;
3830
3831 switch (TREE_CODE (op0))
3832 {
3833 case INDIRECT_REF:
3834 case MISALIGNED_INDIRECT_REF:
3835 do_indirect_ref:
3836 /* Check if we are dealing with an expression of the form '&*ptr'.
3837 While the front end folds away '&*ptr' into 'ptr', these
3838 expressions may be generated internally by the compiler (e.g.,
3839 builtins like __builtin_va_end). */
3840 /* Caution: the silent array decomposition semantics we allow for
3841 ADDR_EXPR means we can't always discard the pair. */
3842 /* Gimplification of the ADDR_EXPR operand may drop
3843 cv-qualification conversions, so make sure we add them if
3844 needed. */
3845 {
3846 tree op00 = TREE_OPERAND (op0, 0);
3847 tree t_expr = TREE_TYPE (expr);
3848 tree t_op00 = TREE_TYPE (op00);
3849
3850 if (!lang_hooks.types_compatible_p (t_expr, t_op00))
3851 {
3852 #ifdef ENABLE_CHECKING
3853 tree t_op0 = TREE_TYPE (op0);
3854 gcc_assert (POINTER_TYPE_P (t_expr)
3855 && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
3856 ? TREE_TYPE (t_op0) : t_op0,
3857 TREE_TYPE (t_expr))
3858 && POINTER_TYPE_P (t_op00)
3859 && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
3860 #endif
3861 op00 = fold_convert (TREE_TYPE (expr), op00);
3862 }
3863 *expr_p = op00;
3864 ret = GS_OK;
3865 }
3866 break;
3867
3868 case VIEW_CONVERT_EXPR:
3869 /* Take the address of our operand and then convert it to the type of
3870 this ADDR_EXPR.
3871
3872 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
3873 all clear. The impact of this transformation is even less clear. */
3874
3875 /* If the operand is a useless conversion, look through it. Doing so
3876 guarantees that the ADDR_EXPR and its operand will remain of the
3877 same type. */
3878 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
3879 op0 = TREE_OPERAND (op0, 0);
3880
3881 *expr_p = fold_convert (TREE_TYPE (expr),
3882 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
3883 ret = GS_OK;
3884 break;
3885
3886 default:
3887 /* We use fb_either here because the C frontend sometimes takes
3888 the address of a call that returns a struct; see
3889 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
3890 the implied temporary explicit. */
3891 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
3892 is_gimple_addressable, fb_either);
3893 if (ret != GS_ERROR)
3894 {
3895 op0 = TREE_OPERAND (expr, 0);
3896
3897 /* For various reasons, the gimplification of the expression
3898 may have made a new INDIRECT_REF. */
3899 if (TREE_CODE (op0) == INDIRECT_REF)
3900 goto do_indirect_ref;
3901
3902 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
3903 is set properly. */
3904 recompute_tree_invariant_for_addr_expr (expr);
3905
3906 /* Mark the RHS addressable. */
3907 lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
3908 }
3909 break;
3910 }
3911
3912 return ret;
3913 }
3914
3915 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
3916 value; output operands should be a gimple lvalue. */
3917
3918 static enum gimplify_status
3919 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
3920 {
3921 tree expr = *expr_p;
3922 int noutputs = list_length (ASM_OUTPUTS (expr));
3923 const char **oconstraints
3924 = (const char **) alloca ((noutputs) * sizeof (const char *));
3925 int i;
3926 tree link;
3927 const char *constraint;
3928 bool allows_mem, allows_reg, is_inout;
3929 enum gimplify_status ret, tret;
3930
3931 ret = GS_ALL_DONE;
3932 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3933 {
3934 size_t constraint_len;
3935 oconstraints[i] = constraint
3936 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3937 constraint_len = strlen (constraint);
3938 if (constraint_len == 0)
3939 continue;
3940
3941 parse_output_constraint (&constraint, i, 0, 0,
3942 &allows_mem, &allows_reg, &is_inout);
3943
3944 if (!allows_reg && allows_mem)
3945 lang_hooks.mark_addressable (TREE_VALUE (link));
3946
3947 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3948 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
3949 fb_lvalue | fb_mayfail);
3950 if (tret == GS_ERROR)
3951 {
3952 error ("invalid lvalue in asm output %d", i);
3953 ret = tret;
3954 }
3955
3956 if (is_inout)
3957 {
3958 /* An input/output operand. To give the optimizers more
3959 flexibility, split it into separate input and output
3960 operands. */
3961 tree input;
3962 char buf[10];
3963
3964 /* Turn the in/out constraint into an output constraint. */
3965 char *p = xstrdup (constraint);
3966 p[0] = '=';
3967 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
3968
3969 /* And add a matching input constraint. */
3970 if (allows_reg)
3971 {
3972 sprintf (buf, "%d", i);
3973
3974 /* If there are multiple alternatives in the constraint,
3975 handle each of them individually. Those that allow register
3976 will be replaced with operand number, the others will stay
3977 unchanged. */
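		  /* E.g. an in/out constraint "+r,m" has by now been turned
		     into the output constraint "=r,m"; the matching input
		     built here is "0,m" (for operand number 0).  */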
3978 if (strchr (p, ',') != NULL)
3979 {
3980 size_t len = 0, buflen = strlen (buf);
3981 char *beg, *end, *str, *dst;
3982
3983 for (beg = p + 1;;)
3984 {
3985 end = strchr (beg, ',');
3986 if (end == NULL)
3987 end = strchr (beg, '\0');
3988 if ((size_t) (end - beg) < buflen)
3989 len += buflen + 1;
3990 else
3991 len += end - beg + 1;
3992 if (*end)
3993 beg = end + 1;
3994 else
3995 break;
3996 }
3997
3998 str = (char *) alloca (len);
3999 for (beg = p + 1, dst = str;;)
4000 {
4001 const char *tem;
4002 bool mem_p, reg_p, inout_p;
4003
4004 end = strchr (beg, ',');
4005 if (end)
4006 *end = '\0';
4007 beg[-1] = '=';
4008 tem = beg - 1;
4009 parse_output_constraint (&tem, i, 0, 0,
4010 					 &mem_p, &reg_p, &inout_p);
4011 if (dst != str)
4012 *dst++ = ',';
4013 if (reg_p)
4014 {
4015 memcpy (dst, buf, buflen);
4016 dst += buflen;
4017 }
4018 else
4019 {
4020 if (end)
4021 len = end - beg;
4022 else
4023 len = strlen (beg);
4024 memcpy (dst, beg, len);
4025 dst += len;
4026 }
4027 if (end)
4028 beg = end + 1;
4029 else
4030 break;
4031 }
4032 *dst = '\0';
4033 input = build_string (dst - str, str);
4034 }
4035 else
4036 input = build_string (strlen (buf), buf);
4037 }
4038 else
4039 input = build_string (constraint_len - 1, constraint + 1);
4040
4041 free (p);
4042
4043 input = build_tree_list (build_tree_list (NULL_TREE, input),
4044 unshare_expr (TREE_VALUE (link)));
4045 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4046 }
4047 }
4048
4049 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
4050 {
4051 constraint
4052 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4053 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4054 oconstraints, &allows_mem, &allows_reg);
4055
4056 /* If we can't make copies, we can only accept memory. */
4057 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4058 {
4059 if (allows_mem)
4060 allows_reg = 0;
4061 else
4062 {
4063 error ("impossible constraint in %<asm%>");
4064 error ("non-memory input %d must stay in memory", i);
4065 return GS_ERROR;
4066 }
4067 }
4068
4069 /* If the operand is a memory input, it should be an lvalue. */
4070 if (!allows_reg && allows_mem)
4071 {
4072 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4073 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4074 lang_hooks.mark_addressable (TREE_VALUE (link));
4075 if (tret == GS_ERROR)
4076 {
4077 error ("memory input %d is not directly addressable", i);
4078 ret = tret;
4079 }
4080 }
4081 else
4082 {
4083 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4084 is_gimple_asm_val, fb_rvalue);
4085 if (tret == GS_ERROR)
4086 ret = tret;
4087 }
4088 }
4089
4090 return ret;
4091 }
4092
4093 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4094 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4095 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4096 return to this function.
4097
4098 FIXME should we complexify the prequeue handling instead? Or use flags
4099 for all the cleanups and let the optimizer tighten them up? The current
4100 code seems pretty fragile; it will break on a cleanup within any
4101 non-conditional nesting. But any such nesting would be broken, anyway;
4102 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4103 and continues out of it. We can do that at the RTL level, though, so
4104 having an optimizer to tighten up try/finally regions would be a Good
4105 Thing. */
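/* As a rough sketch, a gimplified body of the form

     t = A::A ();
     WITH_CLEANUP_EXPR <A::~A (&t)>
     foo (t);

   is rewritten (approximately) as

     t = A::A ();
     try { foo (t); } finally { A::~A (&t); }

   (a try/catch when the cleanup is CLEANUP_EH_ONLY).  */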
4106
4107 static enum gimplify_status
4108 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
4109 {
4110 tree_stmt_iterator iter;
4111 tree body;
4112
4113 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4114
4115 /* We only care about the number of conditions between the innermost
4116 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4117 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4118 int old_conds = gimplify_ctxp->conditions;
4119 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
4120 gimplify_ctxp->conditions = 0;
4121 gimplify_ctxp->conditional_cleanups = NULL_TREE;
4122
4123 body = TREE_OPERAND (*expr_p, 0);
4124 gimplify_to_stmt_list (&body);
4125
4126 gimplify_ctxp->conditions = old_conds;
4127 gimplify_ctxp->conditional_cleanups = old_cleanups;
4128
4129 for (iter = tsi_start (body); !tsi_end_p (iter); )
4130 {
4131 tree *wce_p = tsi_stmt_ptr (iter);
4132 tree wce = *wce_p;
4133
4134 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
4135 {
4136 if (tsi_one_before_end_p (iter))
4137 {
4138 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
4139 tsi_delink (&iter);
4140 break;
4141 }
4142 else
4143 {
4144 tree sl, tfe;
4145 enum tree_code code;
4146
4147 if (CLEANUP_EH_ONLY (wce))
4148 code = TRY_CATCH_EXPR;
4149 else
4150 code = TRY_FINALLY_EXPR;
4151
4152 sl = tsi_split_statement_list_after (&iter);
4153 tfe = build2 (code, void_type_node, sl, NULL_TREE);
4154 append_to_statement_list (TREE_OPERAND (wce, 0),
4155 &TREE_OPERAND (tfe, 1));
4156 *wce_p = tfe;
4157 iter = tsi_start (sl);
4158 }
4159 }
4160 else
4161 tsi_next (&iter);
4162 }
4163
4164 if (temp)
4165 {
4166 *expr_p = temp;
4167 append_to_statement_list (body, pre_p);
4168 return GS_OK;
4169 }
4170 else
4171 {
4172 *expr_p = body;
4173 return GS_ALL_DONE;
4174 }
4175 }
4176
4177 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4178 is the cleanup action required. */
4179
4180 static void
4181 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
4182 {
4183 tree wce;
4184
4185   /* Errors can result in improperly nested cleanups, which in turn cause
4186      confusion when trying to resolve the WITH_CLEANUP_EXPR.  */
4187 if (errorcount || sorrycount)
4188 return;
4189
4190 if (gimple_conditional_context ())
4191 {
4192 /* If we're in a conditional context, this is more complex. We only
4193 want to run the cleanup if we actually ran the initialization that
4194 necessitates it, but we want to run it after the end of the
4195 conditional context. So we wrap the try/finally around the
4196 condition and use a flag to determine whether or not to actually
4197 run the destructor. Thus
4198
4199 test ? f(A()) : 0
4200
4201 becomes (approximately)
4202
4203 flag = 0;
4204 try {
4205 if (test) { A::A(temp); flag = 1; val = f(temp); }
4206 else { val = 0; }
4207 } finally {
4208 if (flag) A::~A(temp);
4209 }
4210 val
4211 */
4212
4213 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4214 tree ffalse = build2 (MODIFY_EXPR, void_type_node, flag,
4215 boolean_false_node);
4216 tree ftrue = build2 (MODIFY_EXPR, void_type_node, flag,
4217 boolean_true_node);
4218 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4219 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4220 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4221 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4222 append_to_statement_list (ftrue, pre_p);
4223
4224 /* Because of this manipulation, and the EH edges that jump
4225 threading cannot redirect, the temporary (VAR) will appear
4226 to be used uninitialized. Don't warn. */
4227 TREE_NO_WARNING (var) = 1;
4228 }
4229 else
4230 {
4231 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4232 CLEANUP_EH_ONLY (wce) = eh_only;
4233 append_to_statement_list (wce, pre_p);
4234 }
4235
4236 gimplify_stmt (&TREE_OPERAND (wce, 0));
4237 }
4238
4239 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
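/* E.g. for TARGET_EXPR <D.1, f ()>, "D.1 = f ();" is appended to PRE_P
   (followed by a cleanup marker if the slot needs one) and *EXPR_P is
   replaced with D.1.  */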
4240
4241 static enum gimplify_status
4242 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4243 {
4244 tree targ = *expr_p;
4245 tree temp = TARGET_EXPR_SLOT (targ);
4246 tree init = TARGET_EXPR_INITIAL (targ);
4247 enum gimplify_status ret;
4248
4249 if (init)
4250 {
4251       /* TARGET_EXPR temps aren't part of the enclosing block, so add this
4252 	 one to the temps list.  */
4253 gimple_add_tmp_var (temp);
4254
4255 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4256 expression is supposed to initialize the slot. */
4257 if (VOID_TYPE_P (TREE_TYPE (init)))
4258 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4259 else
4260 {
4261 init = build2 (INIT_EXPR, void_type_node, temp, init);
4262 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4263 fb_none);
4264 }
4265 if (ret == GS_ERROR)
4266 {
4267 /* PR c++/28266 Make sure this is expanded only once. */
4268 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4269 return GS_ERROR;
4270 }
4271 append_to_statement_list (init, pre_p);
4272
4273 /* If needed, push the cleanup for the temp. */
4274 if (TARGET_EXPR_CLEANUP (targ))
4275 {
4276 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4277 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4278 CLEANUP_EH_ONLY (targ), pre_p);
4279 }
4280
4281 /* Only expand this once. */
4282 TREE_OPERAND (targ, 3) = init;
4283 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4284 }
4285 else
4286 /* We should have expanded this before. */
4287 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4288
4289 *expr_p = temp;
4290 return GS_OK;
4291 }
4292
4293 /* Gimplification of expression trees. */
4294
4295 /* Gimplify an expression which appears at statement context; usually, this
4296 means replacing it with a suitably gimple STATEMENT_LIST. */
4297
4298 void
4299 gimplify_stmt (tree *stmt_p)
4300 {
4301 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4302 }
4303
4304 /* Similarly, but force the result to be a STATEMENT_LIST. */
4305
4306 void
4307 gimplify_to_stmt_list (tree *stmt_p)
4308 {
4309 gimplify_stmt (stmt_p);
4310 if (!*stmt_p)
4311 *stmt_p = alloc_stmt_list ();
4312 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4313 {
4314 tree t = *stmt_p;
4315 *stmt_p = alloc_stmt_list ();
4316 append_to_statement_list (t, stmt_p);
4317 }
4318 }
4319
4320
4321 /* Add FIRSTPRIVATE entries for DECL to CTX and to the surrounding OpenMP
4322    parallels.  If entries already exist, force them to be some flavor of
4323    private.  If there is no enclosing parallel, do nothing.  */
4324
4325 void
4326 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4327 {
4328 splay_tree_node n;
4329
4330 if (decl == NULL || !DECL_P (decl))
4331 return;
4332
4333 do
4334 {
4335 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4336 if (n != NULL)
4337 {
4338 if (n->value & GOVD_SHARED)
4339 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4340 else
4341 return;
4342 }
4343 else if (ctx->is_parallel)
4344 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4345
4346 ctx = ctx->outer_context;
4347 }
4348 while (ctx);
4349 }
4350
4351 /* Similarly for each of the type sizes of TYPE. */
4352
4353 static void
4354 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4355 {
4356 if (type == NULL || type == error_mark_node)
4357 return;
4358 type = TYPE_MAIN_VARIANT (type);
4359
4360 if (pointer_set_insert (ctx->privatized_types, type))
4361 return;
4362
4363 switch (TREE_CODE (type))
4364 {
4365 case INTEGER_TYPE:
4366 case ENUMERAL_TYPE:
4367 case BOOLEAN_TYPE:
4368 case REAL_TYPE:
4369 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4370 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4371 break;
4372
4373 case ARRAY_TYPE:
4374 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4375 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4376 break;
4377
4378 case RECORD_TYPE:
4379 case UNION_TYPE:
4380 case QUAL_UNION_TYPE:
4381 {
4382 tree field;
4383 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4384 if (TREE_CODE (field) == FIELD_DECL)
4385 {
4386 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4387 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4388 }
4389 }
4390 break;
4391
4392 case POINTER_TYPE:
4393 case REFERENCE_TYPE:
4394 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4395 break;
4396
4397 default:
4398 break;
4399 }
4400
4401 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4402 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4403 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4404 }
4405
4406 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
4407
4408 static void
4409 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4410 {
4411 splay_tree_node n;
4412 unsigned int nflags;
4413 tree t;
4414
4415 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4416 return;
4417
4418 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4419 there are constructors involved somewhere. */
4420 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4421 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4422 flags |= GOVD_SEEN;
4423
4424 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4425 if (n != NULL)
4426 {
4427 /* We shouldn't be re-adding the decl with the same data
4428 sharing class. */
4429 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4430 /* The only combination of data sharing classes we should see is
4431 FIRSTPRIVATE and LASTPRIVATE. */
4432 nflags = n->value | flags;
4433 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4434 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4435 n->value = nflags;
4436 return;
4437 }
4438
4439 /* When adding a variable-sized variable, we have to handle all sorts
4440 of additional bits of data: the pointer replacement variable, and
4441 the parameters of the type. */
4442 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
4443 {
4444 /* Add the pointer replacement variable as PRIVATE if the variable
4445 replacement is private, else FIRSTPRIVATE since we'll need the
4446 address of the original variable either for SHARED, or for the
4447 copy into or out of the context. */
4448 if (!(flags & GOVD_LOCAL))
4449 {
4450 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4451 nflags |= flags & GOVD_SEEN;
4452 t = DECL_VALUE_EXPR (decl);
4453 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4454 t = TREE_OPERAND (t, 0);
4455 gcc_assert (DECL_P (t));
4456 omp_add_variable (ctx, t, nflags);
4457 }
4458
4459 /* Add all of the variable and type parameters (which should have
4460 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4461 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4462 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4463 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4464
4465 /* The variable-sized variable itself is never SHARED, only some form
4466 of PRIVATE. The sharing would take place via the pointer variable
4467 which we remapped above. */
4468 if (flags & GOVD_SHARED)
4469 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4470 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4471
4472 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4473 alloca statement we generate for the variable, so make sure it
4474 is available. This isn't automatically needed for the SHARED
4475 case, since we won't be allocating local storage then.
4476 	 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
4477 	 in that case omp_notice_variable will be called later on, when it
4478 	 is gimplified.  */
4479 else if (! (flags & GOVD_LOCAL))
4480 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4481 }
4482 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4483 {
4484 gcc_assert ((flags & GOVD_LOCAL) == 0);
4485 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4486
4487 /* Similar to the direct variable sized case above, we'll need the
4488 size of references being privatized. */
4489 if ((flags & GOVD_SHARED) == 0)
4490 {
4491 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4492 if (TREE_CODE (t) != INTEGER_CST)
4493 omp_notice_variable (ctx, t, true);
4494 }
4495 }
4496
4497 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4498 }
4499
4500 /* Record the fact that DECL was used within the OpenMP context CTX.
4501 IN_CODE is true when real code uses DECL, and false when we should
4502 merely emit default(none) errors. Return true if DECL is going to
4503 be remapped and thus DECL shouldn't be gimplified into its
4504 DECL_VALUE_EXPR (if any). */
4505
4506 static bool
4507 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4508 {
4509 splay_tree_node n;
4510 unsigned flags = in_code ? GOVD_SEEN : 0;
4511 bool ret = false, shared;
4512
4513 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4514 return false;
4515
4516 /* Threadprivate variables are predetermined. */
4517 if (is_global_var (decl))
4518 {
4519 if (DECL_THREAD_LOCAL_P (decl))
4520 return false;
4521
4522 if (DECL_HAS_VALUE_EXPR_P (decl))
4523 {
4524 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4525
4526 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4527 return false;
4528 }
4529 }
4530
4531 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4532 if (n == NULL)
4533 {
4534 enum omp_clause_default_kind default_kind, kind;
4535
4536 if (!ctx->is_parallel)
4537 goto do_outer;
4538
4539 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4540 remapped firstprivate instead of shared. To some extent this is
4541 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4542 default_kind = ctx->default_kind;
4543 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4544 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4545 default_kind = kind;
4546
4547 switch (default_kind)
4548 {
4549 case OMP_CLAUSE_DEFAULT_NONE:
4550 error ("%qs not specified in enclosing parallel",
4551 IDENTIFIER_POINTER (DECL_NAME (decl)));
4552 error ("%Henclosing parallel", &ctx->location);
4553 /* FALLTHRU */
4554 case OMP_CLAUSE_DEFAULT_SHARED:
4555 flags |= GOVD_SHARED;
4556 break;
4557 case OMP_CLAUSE_DEFAULT_PRIVATE:
4558 flags |= GOVD_PRIVATE;
4559 break;
4560 default:
4561 gcc_unreachable ();
4562 }
4563
4564 omp_add_variable (ctx, decl, flags);
4565
4566 shared = (flags & GOVD_SHARED) != 0;
4567 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4568 goto do_outer;
4569 }
4570
4571 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4572 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4573
4574 /* If nothing changed, there's nothing left to do. */
4575 if ((n->value & flags) == flags)
4576 return ret;
4577 flags |= n->value;
4578 n->value = flags;
4579
4580 do_outer:
4581 /* If the variable is private in the current context, then we don't
4582 need to propagate anything to an outer context. */
4583 if (flags & GOVD_PRIVATE)
4584 return ret;
4585 if (ctx->outer_context
4586 && omp_notice_variable (ctx->outer_context, decl, in_code))
4587 return true;
4588 return ret;
4589 }
4590
4591 /* Verify that DECL is private within CTX. If there's specific information
4592 to the contrary in the innermost scope, generate an error. */
4593
4594 static bool
4595 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4596 {
4597 splay_tree_node n;
4598
4599 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4600 if (n != NULL)
4601 {
4602 if (n->value & GOVD_SHARED)
4603 {
4604 if (ctx == gimplify_omp_ctxp)
4605 {
4606 error ("iteration variable %qs should be private",
4607 IDENTIFIER_POINTER (DECL_NAME (decl)));
4608 n->value = GOVD_PRIVATE;
4609 return true;
4610 }
4611 else
4612 return false;
4613 }
4614 else if ((n->value & GOVD_EXPLICIT) != 0
4615 && (ctx == gimplify_omp_ctxp
4616 || (ctx->is_combined_parallel
4617 && gimplify_omp_ctxp->outer_context == ctx)))
4618 {
4619 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4620 error ("iteration variable %qs should not be firstprivate",
4621 IDENTIFIER_POINTER (DECL_NAME (decl)));
4622 else if ((n->value & GOVD_REDUCTION) != 0)
4623 error ("iteration variable %qs should not be reduction",
4624 IDENTIFIER_POINTER (DECL_NAME (decl)));
4625 }
4626 return true;
4627 }
4628
4629 if (ctx->is_parallel)
4630 return false;
4631 else if (ctx->outer_context)
4632 return omp_is_private (ctx->outer_context, decl);
4633 else
4634 return !is_global_var (decl);
4635 }
4636
4637 /* Return true if DECL is private within a parallel region
4638 that binds to the current construct's context or in parallel
4639 region's REDUCTION clause. */
4640
4641 static bool
4642 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
4643 {
4644 splay_tree_node n;
4645
4646 do
4647 {
4648 ctx = ctx->outer_context;
4649 if (ctx == NULL)
4650 return !(is_global_var (decl)
4651 /* References might be private, but might be shared too. */
4652 || lang_hooks.decls.omp_privatize_by_reference (decl));
4653
4654 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4655 if (n != NULL)
4656 return (n->value & GOVD_SHARED) == 0;
4657 }
4658 while (!ctx->is_parallel);
4659 return false;
4660 }
4661
4662 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
4663 and previous omp contexts. */
4664
4665 static void
4666 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
4667 bool in_combined_parallel)
4668 {
4669 struct gimplify_omp_ctx *ctx, *outer_ctx;
4670 tree c;
4671
4672 ctx = new_omp_context (in_parallel, in_combined_parallel);
4673 outer_ctx = ctx->outer_context;
4674
4675 while ((c = *list_p) != NULL)
4676 {
4677 enum gimplify_status gs;
4678 bool remove = false;
4679 bool notice_outer = true;
4680 const char *check_non_private = NULL;
4681 unsigned int flags;
4682 tree decl;
4683
4684 switch (OMP_CLAUSE_CODE (c))
4685 {
4686 case OMP_CLAUSE_PRIVATE:
4687 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
4688 notice_outer = false;
4689 goto do_add;
4690 case OMP_CLAUSE_SHARED:
4691 flags = GOVD_SHARED | GOVD_EXPLICIT;
4692 goto do_add;
4693 case OMP_CLAUSE_FIRSTPRIVATE:
4694 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
4695 check_non_private = "firstprivate";
4696 goto do_add;
4697 case OMP_CLAUSE_LASTPRIVATE:
4698 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
4699 check_non_private = "lastprivate";
4700 goto do_add;
4701 case OMP_CLAUSE_REDUCTION:
4702 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
4703 check_non_private = "reduction";
4704 goto do_add;
4705
4706 do_add:
4707 decl = OMP_CLAUSE_DECL (c);
4708 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4709 {
4710 remove = true;
4711 break;
4712 }
4713 omp_add_variable (ctx, decl, flags);
4714 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4715 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4716 {
4717 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
4718 GOVD_LOCAL | GOVD_SEEN);
4719 gimplify_omp_ctxp = ctx;
4720 push_gimplify_context ();
4721 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
4722 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
4723 push_gimplify_context ();
4724 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
4725 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
4726 gimplify_omp_ctxp = outer_ctx;
4727 }
4728 if (notice_outer)
4729 goto do_notice;
4730 break;
4731
4732 case OMP_CLAUSE_COPYIN:
4733 case OMP_CLAUSE_COPYPRIVATE:
4734 decl = OMP_CLAUSE_DECL (c);
4735 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4736 {
4737 remove = true;
4738 break;
4739 }
4740 do_notice:
4741 if (outer_ctx)
4742 omp_notice_variable (outer_ctx, decl, true);
4743 if (check_non_private
4744 && !in_parallel
4745 && omp_check_private (ctx, decl))
4746 {
4747 error ("%s variable %qs is private in outer context",
4748 check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
4749 remove = true;
4750 }
4751 break;
4752
4753 case OMP_CLAUSE_IF:
4754 OMP_CLAUSE_OPERAND (c, 0)
4755 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
4756 /* Fall through. */
4757
4758 case OMP_CLAUSE_SCHEDULE:
4759 case OMP_CLAUSE_NUM_THREADS:
4760 gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
4761 is_gimple_val, fb_rvalue);
4762 if (gs == GS_ERROR)
4763 remove = true;
4764 break;
4765
4766 case OMP_CLAUSE_NOWAIT:
4767 case OMP_CLAUSE_ORDERED:
4768 break;
4769
4770 case OMP_CLAUSE_DEFAULT:
4771 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
4772 break;
4773
4774 default:
4775 gcc_unreachable ();
4776 }
4777
4778 if (remove)
4779 *list_p = OMP_CLAUSE_CHAIN (c);
4780 else
4781 list_p = &OMP_CLAUSE_CHAIN (c);
4782 }
4783
4784 gimplify_omp_ctxp = ctx;
4785 }
4786
4787 /* For all variables that were not actually used within the context,
4788 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
4789
4790 static int
4791 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
4792 {
4793 tree *list_p = (tree *) data;
4794 tree decl = (tree) n->key;
4795 unsigned flags = n->value;
4796 enum omp_clause_code code;
4797 tree clause;
4798 bool private_debug;
4799
4800 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
4801 return 0;
4802 if ((flags & GOVD_SEEN) == 0)
4803 return 0;
4804 if (flags & GOVD_DEBUG_PRIVATE)
4805 {
4806 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
4807 private_debug = true;
4808 }
4809 else
4810 private_debug
4811 = lang_hooks.decls.omp_private_debug_clause (decl,
4812 !!(flags & GOVD_SHARED));
4813 if (private_debug)
4814 code = OMP_CLAUSE_PRIVATE;
4815 else if (flags & GOVD_SHARED)
4816 {
4817 if (is_global_var (decl))
4818 {
4819 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
4820 while (ctx != NULL)
4821 {
4822 splay_tree_node on
4823 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4824 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
4825 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
4826 break;
4827 ctx = ctx->outer_context;
4828 }
4829 if (ctx == NULL)
4830 return 0;
4831 }
4832 code = OMP_CLAUSE_SHARED;
4833 }
4834 else if (flags & GOVD_PRIVATE)
4835 code = OMP_CLAUSE_PRIVATE;
4836 else if (flags & GOVD_FIRSTPRIVATE)
4837 code = OMP_CLAUSE_FIRSTPRIVATE;
4838 else
4839 gcc_unreachable ();
4840
4841 clause = build_omp_clause (code);
4842 OMP_CLAUSE_DECL (clause) = decl;
4843 OMP_CLAUSE_CHAIN (clause) = *list_p;
4844 if (private_debug)
4845 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
4846 *list_p = clause;
4847
4848 return 0;
4849 }
4850
4851 static void
4852 gimplify_adjust_omp_clauses (tree *list_p)
4853 {
4854 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
4855 tree c, decl;
4856
4857 while ((c = *list_p) != NULL)
4858 {
4859 splay_tree_node n;
4860 bool remove = false;
4861
4862 switch (OMP_CLAUSE_CODE (c))
4863 {
4864 case OMP_CLAUSE_PRIVATE:
4865 case OMP_CLAUSE_SHARED:
4866 case OMP_CLAUSE_FIRSTPRIVATE:
4867 decl = OMP_CLAUSE_DECL (c);
4868 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4869 remove = !(n->value & GOVD_SEEN);
4870 if (! remove)
4871 {
4872 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
4873 if ((n->value & GOVD_DEBUG_PRIVATE)
4874 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
4875 {
4876 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
4877 || ((n->value & GOVD_DATA_SHARE_CLASS)
4878 == GOVD_PRIVATE));
4879 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
4880 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
4881 }
4882 }
4883 break;
4884
4885 case OMP_CLAUSE_LASTPRIVATE:
4886 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
4887 accurately reflect the presence of a FIRSTPRIVATE clause. */
4888 decl = OMP_CLAUSE_DECL (c);
4889 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4890 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4891 = (n->value & GOVD_FIRSTPRIVATE) != 0;
4892 break;
4893
4894 case OMP_CLAUSE_REDUCTION:
4895 case OMP_CLAUSE_COPYIN:
4896 case OMP_CLAUSE_COPYPRIVATE:
4897 case OMP_CLAUSE_IF:
4898 case OMP_CLAUSE_NUM_THREADS:
4899 case OMP_CLAUSE_SCHEDULE:
4900 case OMP_CLAUSE_NOWAIT:
4901 case OMP_CLAUSE_ORDERED:
4902 case OMP_CLAUSE_DEFAULT:
4903 break;
4904
4905 default:
4906 gcc_unreachable ();
4907 }
4908
4909 if (remove)
4910 *list_p = OMP_CLAUSE_CHAIN (c);
4911 else
4912 list_p = &OMP_CLAUSE_CHAIN (c);
4913 }
4914
4915 /* Add in any implicit data sharing. */
4916 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
4917
4918 gimplify_omp_ctxp = ctx->outer_context;
4919 delete_omp_context (ctx);
4920 }
4921
4922 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
4923 gimplification of the body, as well as scanning the body for used
4924 variables. We need to do this scan now, because variable-sized
4925 decls will be decomposed during gimplification. */
4926
4927 static enum gimplify_status
4928 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
4929 {
4930 tree expr = *expr_p;
4931
4932 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
4933 OMP_PARALLEL_COMBINED (expr));
4934
4935 push_gimplify_context ();
4936
4937 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
4938
4939 if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
4940 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
4941 else
4942 pop_gimplify_context (NULL_TREE);
4943
4944 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
4945
4946 return GS_ALL_DONE;
4947 }
4948
4949 /* Gimplify the gross structure of an OMP_FOR statement. */
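/* E.g. an OMP_FOR_INCR of the form "i++" or "++i" is canonicalized below
   (approximately) to "i = i + 1", and "i--" to "i = i + -1"; an increment
   already written as a MODIFY_EXPR has its right-hand side gimplified
   into a GIMPLE value.  */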
4950
4951 static enum gimplify_status
4952 gimplify_omp_for (tree *expr_p, tree *pre_p)
4953 {
4954 tree for_stmt, decl, t;
4955 enum gimplify_status ret = 0;
4956
4957 for_stmt = *expr_p;
4958
4959 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
4960
4961 t = OMP_FOR_INIT (for_stmt);
4962 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
4963 decl = TREE_OPERAND (t, 0);
4964 gcc_assert (DECL_P (decl));
4965 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
4966
4967 /* Make sure the iteration variable is private. */
4968 if (omp_is_private (gimplify_omp_ctxp, decl))
4969 omp_notice_variable (gimplify_omp_ctxp, decl, true);
4970 else
4971 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
4972
4973 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4974 NULL, is_gimple_val, fb_rvalue);
4975
4976 t = OMP_FOR_COND (for_stmt);
4977 gcc_assert (COMPARISON_CLASS_P (t));
4978 gcc_assert (TREE_OPERAND (t, 0) == decl);
4979
4980 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4981 NULL, is_gimple_val, fb_rvalue);
4982
4983 t = OMP_FOR_INCR (for_stmt);
4984 switch (TREE_CODE (t))
4985 {
4986 case PREINCREMENT_EXPR:
4987 case POSTINCREMENT_EXPR:
4988 t = build_int_cst (TREE_TYPE (decl), 1);
4989 goto build_modify;
4990 case PREDECREMENT_EXPR:
4991 case POSTDECREMENT_EXPR:
4992 t = build_int_cst (TREE_TYPE (decl), -1);
4993 goto build_modify;
4994 build_modify:
4995 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
4996 t = build2 (MODIFY_EXPR, void_type_node, decl, t);
4997 OMP_FOR_INCR (for_stmt) = t;
4998 break;
4999
5000 case MODIFY_EXPR:
5001 gcc_assert (TREE_OPERAND (t, 0) == decl);
5002 t = TREE_OPERAND (t, 1);
5003 switch (TREE_CODE (t))
5004 {
5005 case PLUS_EXPR:
5006 if (TREE_OPERAND (t, 1) == decl)
5007 {
5008 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
5009 TREE_OPERAND (t, 0) = decl;
5010 break;
5011 }
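	  /* FALLTHRU */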
5012 case MINUS_EXPR:
5013 gcc_assert (TREE_OPERAND (t, 0) == decl);
5014 break;
5015 default:
5016 gcc_unreachable ();
5017 }
5018
5019 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
5020 NULL, is_gimple_val, fb_rvalue);
5021 break;
5022
5023 default:
5024 gcc_unreachable ();
5025 }
5026
5027 gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
5028 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
5029
5030 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
5031 }
5032
5033 /* Gimplify the gross structure of other OpenMP worksharing constructs.
5034 In particular, OMP_SECTIONS and OMP_SINGLE. */
5035
5036 static enum gimplify_status
5037 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
5038 {
5039 tree stmt = *expr_p;
5040
5041 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
5042 gimplify_to_stmt_list (&OMP_BODY (stmt));
5043 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
5044
5045 return GS_ALL_DONE;
5046 }
5047
5048 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
5049 stabilized the lhs of the atomic operation as *ADDR. Return true if
5050 EXPR is this stabilized form. */
5051
5052 static bool
5053 goa_lhs_expr_p (tree expr, tree addr)
5054 {
5055 /* Also include casts to other type variants. The C front end is fond
5056 of adding these for e.g. volatile variables. This is like
5057 STRIP_TYPE_NOPS but includes the main variant lookup. */
5058 while ((TREE_CODE (expr) == NOP_EXPR
5059 || TREE_CODE (expr) == CONVERT_EXPR
5060 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5061 && TREE_OPERAND (expr, 0) != error_mark_node
5062 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5063 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
5064 expr = TREE_OPERAND (expr, 0);
5065
5066 if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
5067 return true;
5068 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
5069 return true;
5070 return false;
5071 }
5072
5073 /* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
5074 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
5075 size of the data type, and thus usable to find the index of the builtin
5076 decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
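/* E.g. for "#pragma omp atomic" applied to "x += n" with x a 4-byte
   integer, *EXPR_P becomes (approximately)

     __sync_fetch_and_add_4 (&x, n);

   assuming the target provides the corresponding sync pattern.  */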
5077
5078 static enum gimplify_status
5079 gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
5080 {
5081 enum built_in_function base;
5082 tree decl, args, itype;
5083 enum insn_code *optab;
5084
5085 /* Check for one of the supported fetch-op operations. */
5086 switch (TREE_CODE (rhs))
5087 {
5088 case PLUS_EXPR:
5089 base = BUILT_IN_FETCH_AND_ADD_N;
5090 optab = sync_add_optab;
5091 break;
5092 case MINUS_EXPR:
5093 base = BUILT_IN_FETCH_AND_SUB_N;
5094 optab = sync_add_optab;
5095 break;
5096 case BIT_AND_EXPR:
5097 base = BUILT_IN_FETCH_AND_AND_N;
5098 optab = sync_and_optab;
5099 break;
5100 case BIT_IOR_EXPR:
5101 base = BUILT_IN_FETCH_AND_OR_N;
5102 optab = sync_ior_optab;
5103 break;
5104 case BIT_XOR_EXPR:
5105 base = BUILT_IN_FETCH_AND_XOR_N;
5106 optab = sync_xor_optab;
5107 break;
5108 default:
5109 return GS_UNHANDLED;
5110 }
5111
5112 /* Make sure the expression is of the proper form. */
5113 if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
5114 rhs = TREE_OPERAND (rhs, 1);
5115 else if (commutative_tree_code (TREE_CODE (rhs))
5116 && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
5117 rhs = TREE_OPERAND (rhs, 0);
5118 else
5119 return GS_UNHANDLED;
5120
5121 decl = built_in_decls[base + index + 1];
5122 itype = TREE_TYPE (TREE_TYPE (decl));
5123
5124 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
5125 return GS_UNHANDLED;
5126
5127 args = tree_cons (NULL, fold_convert (itype, rhs), NULL);
5128 args = tree_cons (NULL, addr, args);
5129 *expr_p = build_function_call_expr (decl, args);
5130 return GS_OK;
5131 }
5132
5133 /* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
5134 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
5135 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
5136 a subexpression, 0 if it did not, or -1 if an error was encountered. */
5137
5138 static int
5139 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
5140 {
5141 tree expr = *expr_p;
5142 int saw_lhs;
5143
5144 if (goa_lhs_expr_p (expr, lhs_addr))
5145 {
5146 *expr_p = lhs_var;
5147 return 1;
5148 }
5149 if (is_gimple_val (expr))
5150 return 0;
5151
5152 saw_lhs = 0;
5153 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
5154 {
5155 case tcc_binary:
5156 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
5157 lhs_addr, lhs_var);
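      /* FALLTHRU */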
5158 case tcc_unary:
5159 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
5160 lhs_addr, lhs_var);
5161 break;
5162 default:
5163 break;
5164 }
5165
5166 if (saw_lhs == 0)
5167 {
5168 enum gimplify_status gs;
5169 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
5170 if (gs != GS_ALL_DONE)
5171 saw_lhs = -1;
5172 }
5173
5174 return saw_lhs;
5175 }
5176
5177 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5178
5179 oldval = *addr;
5180 repeat:
5181 newval = rhs; // with oldval replacing *addr in rhs
5182 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
5183 if (oldval != newval)
5184 goto repeat;
5185
5186 INDEX is log2 of the size of the data type, and thus usable to find the
5187 index of the builtin decl. */
5188
5189 static enum gimplify_status
5190 gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
5191 tree rhs, int index)
5192 {
5193 tree oldval, oldival, oldival2, newval, newival, label;
5194 tree type, itype, cmpxchg, args, x, iaddr;
5195
5196 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5197 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5198 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5199
5200 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5201 return GS_UNHANDLED;
5202
5203 oldval = create_tmp_var (type, NULL);
5204 newval = create_tmp_var (type, NULL);
5205
5206 /* Precompute as much of RHS as possible. In the same walk, replace
5207 occurrences of the lhs value with our temporary. */
5208 if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
5209 return GS_ERROR;
5210
5211 x = build_fold_indirect_ref (addr);
5212 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5213 gimplify_and_add (x, pre_p);
5214
5215 /* For floating-point values, we'll need to view-convert them to integers
5216 so that we can perform the atomic compare and swap. Simplify the
5217 following code by always setting up the "i"ntegral variables. */
5218 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5219 {
5220 oldival = oldval;
5221 newival = newval;
5222 iaddr = addr;
5223 }
5224 else
5225 {
5226 oldival = create_tmp_var (itype, NULL);
5227 newival = create_tmp_var (itype, NULL);
5228
5229 x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
5230 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5231 gimplify_and_add (x, pre_p);
5232 iaddr = fold_convert (build_pointer_type (itype), addr);
5233 }
5234
5235 oldival2 = create_tmp_var (itype, NULL);
5236
5237 label = create_artificial_label ();
5238 x = build1 (LABEL_EXPR, void_type_node, label);
5239 gimplify_and_add (x, pre_p);
5240
5241 x = build2 (MODIFY_EXPR, void_type_node, newval, rhs);
5242 gimplify_and_add (x, pre_p);
5243
5244 if (newval != newival)
5245 {
5246 x = build1 (VIEW_CONVERT_EXPR, itype, newval);
5247 x = build2 (MODIFY_EXPR, void_type_node, newival, x);
5248 gimplify_and_add (x, pre_p);
5249 }
5250
5251 x = build2 (MODIFY_EXPR, void_type_node, oldival2,
5252 fold_convert (itype, oldival));
5253 gimplify_and_add (x, pre_p);
5254
5255 args = tree_cons (NULL, fold_convert (itype, newival), NULL);
5256 args = tree_cons (NULL, fold_convert (itype, oldival), args);
5257 args = tree_cons (NULL, iaddr, args);
5258 x = build_function_call_expr (cmpxchg, args);
5259 if (oldval == oldival)
5260 x = fold_convert (type, x);
5261 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5262 gimplify_and_add (x, pre_p);
5263
5264 /* For floating point, be prepared for the loop backedge. */
5265 if (oldval != oldival)
5266 {
5267 x = build1 (VIEW_CONVERT_EXPR, type, oldival);
5268 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5269 gimplify_and_add (x, pre_p);
5270 }
5271
5272 /* Note that we always perform the comparison as an integer, even for
5273 floating point.  This allows the atomic operation to succeed even with
5274 NaNs and -0.0: a floating-point compare would loop forever on NaN
(NaN != NaN is always true) and could exit early when only the sign of
zero differs, whereas comparing the bits detects exactly whether the
compare-and-swap installed our value.  */
5275 x = build3 (COND_EXPR, void_type_node,
5276 build2 (NE_EXPR, boolean_type_node, oldival, oldival2),
5277 build1 (GOTO_EXPR, void_type_node, label), NULL);
5278 gimplify_and_add (x, pre_p);
5279
5280 *expr_p = NULL;
5281 return GS_ALL_DONE;
5282 }
5283
5284 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5285
5286 GOMP_atomic_start ();
5287 *addr = rhs;
5288 GOMP_atomic_end ();
5289
5290 The result is not globally atomic, but works so long as all parallel
5291 references are within #pragma omp atomic directives.  According to
5292 responses received from [email protected], this appears to be within
5293 spec, which makes sense, since that's how several other compilers
5294 handle this situation as well.  */
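/* For illustration (a sketch, not the exact trees): an atomic update such as

       #pragma omp atomic
         x += expr;

   whose operand type or alignment rules out the __sync-based expansions is
   lowered to

       GOMP_atomic_start ();
       x = x + expr;
       GOMP_atomic_end ();

   See gimplify_omp_atomic below for how the expansion strategy is chosen.  */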
5295
5296 static enum gimplify_status
5297 gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
5298 {
5299 tree t;
5300
5301 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
5302 t = build_function_call_expr (t, NULL);
5303 gimplify_and_add (t, pre_p);
5304
5305 t = build_fold_indirect_ref (addr);
5306 t = build2 (MODIFY_EXPR, void_type_node, t, rhs);
5307 gimplify_and_add (t, pre_p);
5308
5309 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
5310 t = build_function_call_expr (t, NULL);
5311 gimplify_and_add (t, pre_p);
5312
5313 *expr_p = NULL;
5314 return GS_ALL_DONE;
5315 }
5316
5317 /* Gimplify an OMP_ATOMIC statement. */
5318
5319 static enum gimplify_status
5320 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5321 {
5322 tree addr = TREE_OPERAND (*expr_p, 0);
5323 tree rhs = TREE_OPERAND (*expr_p, 1);
5324 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5325 HOST_WIDE_INT index;
5326
5327 /* Make sure the type is one of the supported sizes. */
5328 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5329 index = exact_log2 (index);
5330 if (index >= 0 && index <= 4)
5331 {
5332 enum gimplify_status gs;
5333 unsigned int align;
5334
5335 if (DECL_P (TREE_OPERAND (addr, 0)))
5336 align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
5337 else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
5338 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
5339 == FIELD_DECL)
5340 align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
5341 else
5342 align = TYPE_ALIGN_UNIT (type);
5343
5344 /* __sync builtins require strict data alignment. */
5345 if (exact_log2 (align) >= index)
5346 {
5347 /* When possible, use specialized atomic update functions. */
5348 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5349 {
5350 gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
5351 if (gs != GS_UNHANDLED)
5352 return gs;
5353 }
5354
5355 /* If we don't have specialized __sync builtins, try to implement
5356 it as a compare-and-swap loop.  */
5357 gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
5358 if (gs != GS_UNHANDLED)
5359 return gs;
5360 }
5361 }
5362
5363 /* The ultimate fallback is wrapping the operation in a mutex. */
5364 return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
5365 }
5366
5367 /* Gimplifies the expression tree pointed to by EXPR_P.  Returns GS_ERROR
5368 if gimplification failed.
5369
5370 PRE_P points to the list where side effects that must happen before
5371 EXPR should be stored.
5372
5373 POST_P points to the list where side effects that must happen after
5374 EXPR should be stored, or NULL if there is no suitable list. In
5375 that case, we copy the result to a temporary, emit the
5376 post-effects, and then return the temporary.
5377
5378 GIMPLE_TEST_F points to a function that takes a tree T and
5379 returns nonzero if T is in the GIMPLE form requested by the
5380 caller. The GIMPLE predicates are in tree-gimple.c.
5381
5382 This test is used twice. Before gimplification, the test is
5383 invoked to determine whether *EXPR_P is already gimple enough. If
5384 that fails, *EXPR_P is gimplified according to its code and
5385 GIMPLE_TEST_F is called again. If the test still fails, then a new
5386 temporary variable is created and assigned the value of the
5387 gimplified expression.
5388
5389 FALLBACK tells the function what sort of a temporary we want.  If the
5390 fb_rvalue bit is set, an rvalue is OK.  If the fb_lvalue bit is set, an
5391 lvalue is OK.  If both are set, either is OK, but an lvalue is preferable.
5392
5393 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5394 iterates until a solution is reached.  */
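/* Usage sketch: a typical recursive call from within this file gimplifies
   an operand in place, requesting a GIMPLE value usable as an rvalue:

       ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                            is_gimple_val, fb_rvalue);

   Statement-level callers (e.g. gimplify_stmt) instead pass a NULL PRE_P,
   which makes the function collect its own queues and splice the whole
   gimplified statement back into *EXPR_P.  */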
5395
5396 enum gimplify_status
5397 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5398 bool (* gimple_test_f) (tree), fallback_t fallback)
5399 {
5400 tree tmp;
5401 tree internal_pre = NULL_TREE;
5402 tree internal_post = NULL_TREE;
5403 tree save_expr;
5404 int is_statement = (pre_p == NULL);
5405 location_t saved_location;
5406 enum gimplify_status ret;
5407
5408 save_expr = *expr_p;
5409 if (save_expr == NULL_TREE)
5410 return GS_ALL_DONE;
5411
5412 /* We used to check the predicate here and return immediately if it
5413 succeeds. This is wrong; the design is for gimplification to be
5414 idempotent, and for the predicates to only test for valid forms, not
5415 whether they are fully simplified. */
5416
5417 /* Set up our internal queues if needed. */
5418 if (pre_p == NULL)
5419 pre_p = &internal_pre;
5420 if (post_p == NULL)
5421 post_p = &internal_post;
5422
5423 saved_location = input_location;
5424 if (save_expr != error_mark_node
5425 && EXPR_HAS_LOCATION (*expr_p))
5426 input_location = EXPR_LOCATION (*expr_p);
5427
5428 /* Loop over the specific gimplifiers until the toplevel node
5429 remains the same. */
5430 do
5431 {
5432 /* Strip away as many useless type conversions as possible
5433 at the toplevel. */
5434 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5435
5436 /* Remember the expr. */
5437 save_expr = *expr_p;
5438
5439 /* Die, die, die, my darling. */
5440 if (save_expr == error_mark_node
5441 || (TREE_TYPE (save_expr)
5442 && TREE_TYPE (save_expr) == error_mark_node))
5443 {
5444 ret = GS_ERROR;
5445 break;
5446 }
5447
5448 /* Do any language-specific gimplification. */
5449 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5450 if (ret == GS_OK)
5451 {
5452 if (*expr_p == NULL_TREE)
5453 break;
5454 if (*expr_p != save_expr)
5455 continue;
5456 }
5457 else if (ret != GS_UNHANDLED)
5458 break;
5459
5460 ret = GS_OK;
5461 switch (TREE_CODE (*expr_p))
5462 {
5463 /* First deal with the special cases. */
5464
5465 case POSTINCREMENT_EXPR:
5466 case POSTDECREMENT_EXPR:
5467 case PREINCREMENT_EXPR:
5468 case PREDECREMENT_EXPR:
5469 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5470 fallback != fb_none);
5471 break;
5472
5473 case ARRAY_REF:
5474 case ARRAY_RANGE_REF:
5475 case REALPART_EXPR:
5476 case IMAGPART_EXPR:
5477 case COMPONENT_REF:
5478 case VIEW_CONVERT_EXPR:
5479 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5480 fallback ? fallback : fb_rvalue);
5481 break;
5482
5483 case COND_EXPR:
5484 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
5485 /* C99 code may assign to an array in a structure value of a
5486 conditional expression, and this has undefined behavior
5487 only on execution, so create a temporary if an lvalue is
5488 required. */
5489 if (fallback == fb_lvalue)
5490 {
5491 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5492 lang_hooks.mark_addressable (*expr_p);
5493 }
5494 break;
5495
5496 case CALL_EXPR:
5497 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5498 /* C99 code may assign to an array in a structure returned
5499 from a function, and this has undefined behavior only on
5500 execution, so create a temporary if an lvalue is
5501 required. */
5502 if (fallback == fb_lvalue)
5503 {
5504 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5505 lang_hooks.mark_addressable (*expr_p);
5506 }
5507 break;
5508
5509 case TREE_LIST:
5510 gcc_unreachable ();
5511
5512 case COMPOUND_EXPR:
5513 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5514 break;
5515
5516 case MODIFY_EXPR:
5517 case INIT_EXPR:
5518 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5519 fallback != fb_none);
5520
5521 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
5522 useful. */
5523 if (*expr_p && TREE_CODE (*expr_p) == INIT_EXPR)
5524 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
5525 break;
5526
5527 case TRUTH_ANDIF_EXPR:
5528 case TRUTH_ORIF_EXPR:
5529 ret = gimplify_boolean_expr (expr_p);
5530 break;
5531
5532 case TRUTH_NOT_EXPR:
5533 TREE_OPERAND (*expr_p, 0)
5534 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5535 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5536 is_gimple_val, fb_rvalue);
5537 recalculate_side_effects (*expr_p);
5538 break;
5539
5540 case ADDR_EXPR:
5541 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5542 break;
5543
5544 case VA_ARG_EXPR:
5545 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5546 break;
5547
5548 case CONVERT_EXPR:
5549 case NOP_EXPR:
5550 if (IS_EMPTY_STMT (*expr_p))
5551 {
5552 ret = GS_ALL_DONE;
5553 break;
5554 }
5555
5556 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5557 || fallback == fb_none)
5558 {
5559 /* Just strip a conversion to void (or in void context) and
5560 try again. */
5561 *expr_p = TREE_OPERAND (*expr_p, 0);
5562 break;
5563 }
5564
5565 ret = gimplify_conversion (expr_p);
5566 if (ret == GS_ERROR)
5567 break;
5568 if (*expr_p != save_expr)
5569 break;
5570 /* FALLTHRU */
5571
5572 case FIX_TRUNC_EXPR:
5573 case FIX_CEIL_EXPR:
5574 case FIX_FLOOR_EXPR:
5575 case FIX_ROUND_EXPR:
5576 /* unary_expr: ... | '(' cast ')' val | ... */
5577 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5578 is_gimple_val, fb_rvalue);
5579 recalculate_side_effects (*expr_p);
5580 break;
5581
5582 case INDIRECT_REF:
5583 *expr_p = fold_indirect_ref (*expr_p);
5584 if (*expr_p != save_expr)
5585 break;
5586 /* else fall through. */
5587 case ALIGN_INDIRECT_REF:
5588 case MISALIGNED_INDIRECT_REF:
5589 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5590 is_gimple_reg, fb_rvalue);
5591 recalculate_side_effects (*expr_p);
5592 break;
5593
5594 /* Constants need not be gimplified. */
5595 case INTEGER_CST:
5596 case REAL_CST:
5597 case STRING_CST:
5598 case COMPLEX_CST:
5599 case VECTOR_CST:
5600 ret = GS_ALL_DONE;
5601 break;
5602
5603 case CONST_DECL:
5604 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5605 CONST_DECL node. Otherwise the decl is replaceable by its
5606 value. */
5607 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5608 if (fallback & fb_lvalue)
5609 ret = GS_ALL_DONE;
5610 else
5611 *expr_p = DECL_INITIAL (*expr_p);
5612 break;
5613
5614 case DECL_EXPR:
5615 ret = gimplify_decl_expr (expr_p);
5616 break;
5617
5618 case EXC_PTR_EXPR:
5619 /* FIXME make this a decl. */
5620 ret = GS_ALL_DONE;
5621 break;
5622
5623 case BIND_EXPR:
5624 ret = gimplify_bind_expr (expr_p, pre_p);
5625 break;
5626
5627 case LOOP_EXPR:
5628 ret = gimplify_loop_expr (expr_p, pre_p);
5629 break;
5630
5631 case SWITCH_EXPR:
5632 ret = gimplify_switch_expr (expr_p, pre_p);
5633 break;
5634
5635 case EXIT_EXPR:
5636 ret = gimplify_exit_expr (expr_p);
5637 break;
5638
5639 case GOTO_EXPR:
5640 /* If the target is not a LABEL_DECL, then it is a computed jump
5641 and the target needs to be gimplified.  */
5642 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5643 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5644 NULL, is_gimple_val, fb_rvalue);
5645 break;
5646
5647 case LABEL_EXPR:
5648 ret = GS_ALL_DONE;
5649 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5650 == current_function_decl);
5651 break;
5652
5653 case CASE_LABEL_EXPR:
5654 ret = gimplify_case_label_expr (expr_p);
5655 break;
5656
5657 case RETURN_EXPR:
5658 ret = gimplify_return_expr (*expr_p, pre_p);
5659 break;
5660
5661 case CONSTRUCTOR:
5662 /* Don't reduce this in place; let gimplify_init_constructor work its
5663 magic.  But if we're just elaborating this for side effects, just
5664 gimplify any element that has side effects.  */
5665 if (fallback == fb_none)
5666 {
5667 unsigned HOST_WIDE_INT ix;
5668 constructor_elt *ce;
5669 tree temp = NULL_TREE;
5670 for (ix = 0;
5671 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5672 ix, ce);
5673 ix++)
5674 if (TREE_SIDE_EFFECTS (ce->value))
5675 append_to_statement_list (ce->value, &temp);
5676
5677 *expr_p = temp;
5678 ret = GS_OK;
5679 }
5680 /* C99 code may assign to an array in a constructed
5681 structure or union, and this has undefined behavior only
5682 on execution, so create a temporary if an lvalue is
5683 required. */
5684 else if (fallback == fb_lvalue)
5685 {
5686 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5687 lang_hooks.mark_addressable (*expr_p);
5688 }
5689 else
5690 ret = GS_ALL_DONE;
5691 break;
5692
5693 /* The following are special cases that are not handled by the
5694 original GIMPLE grammar. */
5695
5696 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
5697 eliminated. */
5698 case SAVE_EXPR:
5699 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5700 break;
5701
5702 case BIT_FIELD_REF:
5703 {
5704 enum gimplify_status r0, r1, r2;
5705
5706 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5707 is_gimple_lvalue, fb_either);
5708 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5709 is_gimple_val, fb_rvalue);
5710 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5711 is_gimple_val, fb_rvalue);
5712 recalculate_side_effects (*expr_p);
5713
5714 ret = MIN (r0, MIN (r1, r2));
5715 }
5716 break;
5717
5718 case NON_LVALUE_EXPR:
5719 /* This should have been stripped above. */
5720 gcc_unreachable ();
5721
5722 case ASM_EXPR:
5723 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5724 break;
5725
5726 case TRY_FINALLY_EXPR:
5727 case TRY_CATCH_EXPR:
5728 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5729 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5730 ret = GS_ALL_DONE;
5731 break;
5732
5733 case CLEANUP_POINT_EXPR:
5734 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5735 break;
5736
5737 case TARGET_EXPR:
5738 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5739 break;
5740
5741 case CATCH_EXPR:
5742 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5743 ret = GS_ALL_DONE;
5744 break;
5745
5746 case EH_FILTER_EXPR:
5747 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5748 ret = GS_ALL_DONE;
5749 break;
5750
5751 case OBJ_TYPE_REF:
5752 {
5753 enum gimplify_status r0, r1;
5754 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5755 is_gimple_val, fb_rvalue);
5756 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5757 is_gimple_val, fb_rvalue);
5758 ret = MIN (r0, r1);
5759 }
5760 break;
5761
5762 case LABEL_DECL:
5763 /* We get here when taking the address of a label.  We mark
5764 the label as "forced", meaning it can never be removed and
5765 is a potential target for any computed goto.  */
5766 FORCED_LABEL (*expr_p) = 1;
5767 ret = GS_ALL_DONE;
5768 break;
5769
5770 case STATEMENT_LIST:
5771 ret = gimplify_statement_list (expr_p, pre_p);
5772 break;
5773
5774 case WITH_SIZE_EXPR:
5775 {
5776 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5777 post_p == &internal_post ? NULL : post_p,
5778 gimple_test_f, fallback);
5779 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5780 is_gimple_val, fb_rvalue);
5781 }
5782 break;
5783
5784 case VAR_DECL:
5785 case PARM_DECL:
5786 ret = gimplify_var_or_parm_decl (expr_p);
5787 break;
5788
5789 case RESULT_DECL:
5790 /* When within an OpenMP context, notice uses of variables. */
5791 if (gimplify_omp_ctxp)
5792 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
5793 ret = GS_ALL_DONE;
5794 break;
5795
5796 case SSA_NAME:
5797 /* Allow callbacks into the gimplifier during optimization. */
5798 ret = GS_ALL_DONE;
5799 break;
5800
5801 case OMP_PARALLEL:
5802 ret = gimplify_omp_parallel (expr_p, pre_p);
5803 break;
5804
5805 case OMP_FOR:
5806 ret = gimplify_omp_for (expr_p, pre_p);
5807 break;
5808
5809 case OMP_SECTIONS:
5810 case OMP_SINGLE:
5811 ret = gimplify_omp_workshare (expr_p, pre_p);
5812 break;
5813
5814 case OMP_SECTION:
5815 case OMP_MASTER:
5816 case OMP_ORDERED:
5817 case OMP_CRITICAL:
5818 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
5819 break;
5820
5821 case OMP_ATOMIC:
5822 ret = gimplify_omp_atomic (expr_p, pre_p);
5823 break;
5824
5825 case OMP_RETURN:
5826 case OMP_CONTINUE:
5827 ret = GS_ALL_DONE;
5828 break;
5829
5830 default:
5831 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
5832 {
5833 case tcc_comparison:
5834 /* Handle comparison of non-scalar mode aggregate objects
5835 with a call to memcmp. It would be nice to only have to do
5836 this for variable-sized objects, but then we'd have to allow
5837 the same nest of reference nodes we allow for MODIFY_EXPR and
5838 that's too complex.
5839
5840 Compare scalar mode aggregates as scalar mode values. Using
5841 memcmp for them would be very inefficient at best, and is
5842 plain wrong if bitfields are involved. */
5843
5844 {
5845 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
5846
5847 if (!AGGREGATE_TYPE_P (type))
5848 goto expr_2;
5849 else if (TYPE_MODE (type) != BLKmode)
5850 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
5851 else
5852 ret = gimplify_variable_sized_compare (expr_p);
5853
5854 break;
5855 }
5856
5857 /* If *EXPR_P does not need to be special-cased, handle it
5858 according to its class. */
5859 case tcc_unary:
5860 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5861 post_p, is_gimple_val, fb_rvalue);
5862 break;
5863
5864 case tcc_binary:
5865 expr_2:
5866 {
5867 enum gimplify_status r0, r1;
5868
5869 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5870 post_p, is_gimple_val, fb_rvalue);
5871 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
5872 post_p, is_gimple_val, fb_rvalue);
5873
5874 ret = MIN (r0, r1);
5875 break;
5876 }
5877
5878 case tcc_declaration:
5879 case tcc_constant:
5880 ret = GS_ALL_DONE;
5881 goto dont_recalculate;
5882
5883 default:
5884 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
5885 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
5886 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
5887 goto expr_2;
5888 }
5889
5890 recalculate_side_effects (*expr_p);
5891 dont_recalculate:
5892 break;
5893 }
5894
5895 /* If we replaced *expr_p, gimplify again. */
5896 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
5897 ret = GS_ALL_DONE;
5898 }
5899 while (ret == GS_OK);
5900
5901 /* If we encountered an error_mark somewhere nested inside, either
5902 stub out the statement or propagate the error back out. */
5903 if (ret == GS_ERROR)
5904 {
5905 if (is_statement)
5906 *expr_p = NULL;
5907 goto out;
5908 }
5909
5910 /* This was only valid as a return value from the langhook, which
5911 we handled. Make sure it doesn't escape from any other context. */
5912 gcc_assert (ret != GS_UNHANDLED);
5913
5914 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
5915 {
5916 /* We aren't looking for a value, and we don't have a valid
5917 statement. If it doesn't have side-effects, throw it away. */
5918 if (!TREE_SIDE_EFFECTS (*expr_p))
5919 *expr_p = NULL;
5920 else if (!TREE_THIS_VOLATILE (*expr_p))
5921 {
5922 /* This is probably a _REF that contains something nested that
5923 has side effects. Recurse through the operands to find it. */
5924 enum tree_code code = TREE_CODE (*expr_p);
5925
5926 switch (code)
5927 {
5928 case COMPONENT_REF:
5929 case REALPART_EXPR:
5930 case IMAGPART_EXPR:
5931 case VIEW_CONVERT_EXPR:
5932 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5933 gimple_test_f, fallback);
5934 break;
5935
5936 case ARRAY_REF:
5937 case ARRAY_RANGE_REF:
5938 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5939 gimple_test_f, fallback);
5940 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5941 gimple_test_f, fallback);
5942 break;
5943
5944 default:
5945 /* Anything else with side-effects must be converted to
5946 a valid statement before we get here. */
5947 gcc_unreachable ();
5948 }
5949
5950 *expr_p = NULL;
5951 }
5952 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
5953 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
5954 {
5955 /* Historically, the compiler has treated a bare reference
5956 to a non-BLKmode volatile lvalue as forcing a load. */
5957 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
5958 /* Normally, we do not want to create a temporary for a
5959 TREE_ADDRESSABLE type because such a type should not be
5960 copied by bitwise-assignment. However, we make an
5961 exception here, as all we are doing here is ensuring that
5962 we read the bytes that make up the type. We use
5963 create_tmp_var_raw because create_tmp_var will abort when
5964 given a TREE_ADDRESSABLE type. */
5965 tree tmp = create_tmp_var_raw (type, "vol");
5966 gimple_add_tmp_var (tmp);
5967 *expr_p = build2 (MODIFY_EXPR, type, tmp, *expr_p);
5968 }
5969 else
5970 /* We can't do anything useful with a volatile reference to
5971 an incomplete type, so just throw it away. Likewise for
5972 a BLKmode type, since any implicit inner load should
5973 already have been turned into an explicit one by the
5974 gimplification process. */
5975 *expr_p = NULL;
5976 }
5977
5978 /* If we are gimplifying at the statement level, we're done. Tack
5979 everything together and replace the original statement with the
5980 gimplified form. */
5981 if (fallback == fb_none || is_statement)
5982 {
5983 if (internal_pre || internal_post)
5984 {
5985 append_to_statement_list (*expr_p, &internal_pre);
5986 append_to_statement_list (internal_post, &internal_pre);
5987 annotate_all_with_locus (&internal_pre, input_location);
5988 *expr_p = internal_pre;
5989 }
5990 else if (!*expr_p)
5991 ;
5992 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
5993 annotate_all_with_locus (expr_p, input_location);
5994 else
5995 annotate_one_with_locus (*expr_p, input_location);
5996 goto out;
5997 }
5998
5999 /* Otherwise we're gimplifying a subexpression, so the resulting value is
6000 interesting. */
6001
6002 /* If it's sufficiently simple already, we're done. Unless we are
6003 handling some post-effects internally; if that's the case, we need to
6004 copy into a temp before adding the post-effects to the tree. */
6005 if (!internal_post && (*gimple_test_f) (*expr_p))
6006 goto out;
6007
6008 /* Otherwise, we need to create a new temporary for the gimplified
6009 expression. */
6010
6011 /* We can't return an lvalue if we have an internal postqueue. The
6012 object the lvalue refers to would (probably) be modified by the
6013 postqueue; we need to copy the value out first, which means an
6014 rvalue. */
6015 if ((fallback & fb_lvalue) && !internal_post
6016 && is_gimple_addressable (*expr_p))
6017 {
6018 /* An lvalue will do. Take the address of the expression, store it
6019 in a temporary, and replace the expression with an INDIRECT_REF of
6020 that temporary. */
6021 tmp = build_fold_addr_expr (*expr_p);
6022 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
6023 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
6024 }
6025 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
6026 {
6027 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6028
6029 /* An rvalue will do. Assign the gimplified expression into a new
6030 temporary TMP and replace the original expression with TMP. */
6031
6032 if (internal_post || (fallback & fb_lvalue))
6033 /* The postqueue might change the value of the expression between
6034 the initialization and use of the temporary, so we can't use a
6035 formal temp. FIXME do we care? */
6036 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6037 else
6038 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6039
6040 if (TREE_CODE (*expr_p) != SSA_NAME)
6041 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
6042 }
6043 else
6044 {
6045 #ifdef ENABLE_CHECKING
6046 if (!(fallback & fb_mayfail))
6047 {
6048 fprintf (stderr, "gimplification failed:\n");
6049 print_generic_expr (stderr, *expr_p, 0);
6050 debug_tree (*expr_p);
6051 internal_error ("gimplification failed");
6052 }
6053 #endif
6054 gcc_assert (fallback & fb_mayfail);
6055 /* If this is an asm statement, and the user asked for the
6056 impossible, don't die. Fail and let gimplify_asm_expr
6057 issue an error. */
6058 ret = GS_ERROR;
6059 goto out;
6060 }
6061
6062 /* Make sure the temporary matches our predicate. */
6063 gcc_assert ((*gimple_test_f) (*expr_p));
6064
6065 if (internal_post)
6066 {
6067 annotate_all_with_locus (&internal_post, input_location);
6068 append_to_statement_list (internal_post, pre_p);
6069 }
6070
6071 out:
6072 input_location = saved_location;
6073 return ret;
6074 }
6075
6076 /* Look through TYPE for variable-sized objects and gimplify each such
6077 size that we find. Add to LIST_P any statements generated. */
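/* For example (a sketch; the temporary name is made up), given a C99
   variable-length array such as

       void f (int n)
       {
         double a[n + 1];
         ...
       }

   the TYPE_SIZE of a's type is an expression involving n, and gimplifying
   it here adds something like "size.7 = ((<sizetype>) n + 1) * 8" to LIST_P
   so that later uses of the size see a simple variable.  */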
6078
6079 void
6080 gimplify_type_sizes (tree type, tree *list_p)
6081 {
6082 tree field, t;
6083
6084 if (type == NULL || type == error_mark_node)
6085 return;
6086
6087 /* We first do the main variant, then copy into any other variants. */
6088 type = TYPE_MAIN_VARIANT (type);
6089
6090 /* Avoid infinite recursion. */
6091 if (TYPE_SIZES_GIMPLIFIED (type))
6092 return;
6093
6094 TYPE_SIZES_GIMPLIFIED (type) = 1;
6095
6096 switch (TREE_CODE (type))
6097 {
6098 case INTEGER_TYPE:
6099 case ENUMERAL_TYPE:
6100 case BOOLEAN_TYPE:
6101 case REAL_TYPE:
6102 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
6103 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
6104
6105 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6106 {
6107 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
6108 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
6109 }
6110 break;
6111
6112 case ARRAY_TYPE:
6113 /* These types may not have declarations, so handle them here. */
6114 gimplify_type_sizes (TREE_TYPE (type), list_p);
6115 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
6116 break;
6117
6118 case RECORD_TYPE:
6119 case UNION_TYPE:
6120 case QUAL_UNION_TYPE:
6121 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
6122 if (TREE_CODE (field) == FIELD_DECL)
6123 {
6124 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
6125 gimplify_type_sizes (TREE_TYPE (field), list_p);
6126 }
6127 break;
6128
6129 case POINTER_TYPE:
6130 case REFERENCE_TYPE:
6131 /* We used to recurse on the pointed-to type here, which turned out to
6132 be incorrect because its definition might refer to variables not
6133 yet initialized at this point if a forward declaration is involved.
6134
6135 It was actually useful for anonymous pointed-to types to ensure
6136 that the sizes evaluation dominates every possible later use of the
6137 values. Restricting to such types here would be safe since there
6138 is no possible forward declaration around, but would introduce an
6139 undesirable middle-end semantic to anonymity. We then defer to
6140 front-ends the responsibility of ensuring that the sizes are
6141 evaluated both early and late enough, e.g. by attaching artificial
6142 type declarations to the tree. */
6143 break;
6144
6145 default:
6146 break;
6147 }
6148
6149 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
6150 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6151
6152 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6153 {
6154 TYPE_SIZE (t) = TYPE_SIZE (type);
6155 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
6156 TYPE_SIZES_GIMPLIFIED (t) = 1;
6157 }
6158 }
6159
6160 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
6161 a size or position, has had all of its SAVE_EXPRs evaluated.
6162 We add any required statements to STMT_P. */
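/* E.g. (sketch): a size such as SAVE_EXPR <n * 4> is unshared and
   gimplified, so *EXPR_P ends up as a temporary of the same sizetype with
   the assignment of n * 4 to that temporary appended to STMT_P.  */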
6163
6164 void
6165 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
6166 {
6167 tree type, expr = *expr_p;
6168
6169 /* We don't do anything if the value isn't there, is constant, or contains
6170 a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
6171 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
6172 will want to replace it with a new variable, but that will cause problems
6173 if this type is from outside the function. It's OK to have that here. */
6174 if (expr == NULL_TREE || TREE_CONSTANT (expr)
6175 || TREE_CODE (expr) == VAR_DECL
6176 || CONTAINS_PLACEHOLDER_P (expr))
6177 return;
6178
6179 type = TREE_TYPE (expr);
6180 *expr_p = unshare_expr (expr);
6181
6182 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
6183 expr = *expr_p;
6184
6185 /* Verify that we've an exact type match with the original expression.
6186 In particular, we do not wish to drop a "sizetype" in favour of a
6187 type of similar dimensions. We don't want to pollute the generic
6188 type-stripping code with this knowledge because it doesn't matter
6189 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
6190 and friends retain their "sizetype-ness". */
6191 if (TREE_TYPE (expr) != type
6192 && TREE_CODE (type) == INTEGER_TYPE
6193 && TYPE_IS_SIZETYPE (type))
6194 {
6195 tree tmp;
6196
6197 *expr_p = create_tmp_var (type, NULL);
6198 tmp = build1 (NOP_EXPR, type, expr);
6199 tmp = build2 (MODIFY_EXPR, type, *expr_p, tmp);
6200 if (EXPR_HAS_LOCATION (expr))
6201 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
6202 else
6203 SET_EXPR_LOCATION (tmp, input_location);
6204
6205 gimplify_and_add (tmp, stmt_p);
6206 }
6207 }
6208
6209 #ifdef ENABLE_CHECKING
6210 /* Compare types A and B for a "close enough" match. */
6211
6212 static bool
6213 cpt_same_type (tree a, tree b)
6214 {
6215 if (lang_hooks.types_compatible_p (a, b))
6216 return true;
6217
6218 /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
6219 link them together. This routine is intended to catch type errors
6220 that will affect the optimizers, and the optimizers don't add new
6221 dereferences of function pointers, so ignore it. */
6222 if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
6223 && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
6224 return true;
6225
6226 /* ??? The C FE pushes type qualifiers after the fact into the type of
6227 the element from the type of the array. See build_unary_op's handling
6228 of ADDR_EXPR. This seems wrong -- if we were going to do this, we
6229 should have done it when creating the variable in the first place.
6230 Alternately, why aren't the two array types made variants? */
6231 if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
6232 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6233
6234 /* And because of those, we have to recurse down through pointers. */
6235 if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
6236 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6237
6238 return false;
6239 }
6240
6241 /* Check for some cases of the front end missing cast expressions.
6242 The type of a dereference should correspond to the pointer type;
6243 similarly the type of an address should match its object. */
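/* For instance (hypothetical), an INDIRECT_REF of type float whose operand
   has type "int *" would make cpt_same_type (float, int) fail and trip the
   assertion below; a well-formed dereference of an "int *" must have type
   int, modulo the compatibilities handled above.  */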
6244
6245 static tree
6246 check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
6247 void *data ATTRIBUTE_UNUSED)
6248 {
6249 tree t = *tp;
6250 tree ptype, otype, dtype;
6251
6252 switch (TREE_CODE (t))
6253 {
6254 case INDIRECT_REF:
6255 case ARRAY_REF:
6256 otype = TREE_TYPE (t);
6257 ptype = TREE_TYPE (TREE_OPERAND (t, 0));
6258 dtype = TREE_TYPE (ptype);
6259 gcc_assert (cpt_same_type (otype, dtype));
6260 break;
6261
6262 case ADDR_EXPR:
6263 ptype = TREE_TYPE (t);
6264 otype = TREE_TYPE (TREE_OPERAND (t, 0));
6265 dtype = TREE_TYPE (ptype);
6266 if (!cpt_same_type (otype, dtype))
6267 {
6268 /* &array is allowed to produce a pointer to the element, rather than
6269 a pointer to the array type.  We must allow this in order to
6270 properly represent assigning the address of an array in C into a
6271 pointer to the element type.  */
6272 gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
6273 && POINTER_TYPE_P (ptype)
6274 && cpt_same_type (TREE_TYPE (otype), dtype));
6275 break;
6276 }
6277 break;
6278
6279 default:
6280 return NULL_TREE;
6281 }
6282
6283
6284 return NULL_TREE;
6285 }
6286 #endif
6287
6288 /* Gimplify the body of statements pointed to by BODY_P.  FNDECL is the
6289 function decl containing BODY.  If DO_PARMS is true, resolve callee-copied
parameters first via gimplify_parameters.  */
6290
6291 void
6292 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6293 {
6294 location_t saved_location = input_location;
6295 tree body, parm_stmts;
6296
6297 timevar_push (TV_TREE_GIMPLIFY);
6298
6299 gcc_assert (gimplify_ctxp == NULL);
6300 push_gimplify_context ();
6301
6302 /* Unshare most shared trees in the body and in that of any nested functions.
6303 It would seem we don't have to do this for nested functions because
6304 they are supposed to be output and then the outer function gimplified
6305 first, but the g++ front end doesn't always do it that way. */
6306 unshare_body (body_p, fndecl);
6307 unvisit_body (body_p, fndecl);
6308
6309 /* Make sure input_location isn't set to something weird.  */
6310 input_location = DECL_SOURCE_LOCATION (fndecl);
6311
6312 /* Resolve callee-copies. This has to be done before processing
6313 the body so that DECL_VALUE_EXPR gets processed correctly. */
6314 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6315
6316 /* Gimplify the function's body. */
6317 gimplify_stmt (body_p);
6318 body = *body_p;
6319
6320 if (!body)
6321 body = alloc_stmt_list ();
6322 else if (TREE_CODE (body) == STATEMENT_LIST)
6323 {
6324 tree t = expr_only (*body_p);
6325 if (t)
6326 body = t;
6327 }
6328
6329 /* If there isn't an outer BIND_EXPR, add one. */
6330 if (TREE_CODE (body) != BIND_EXPR)
6331 {
6332 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6333 NULL_TREE, NULL_TREE);
6334 TREE_SIDE_EFFECTS (b) = 1;
6335 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6336 body = b;
6337 }
6338
6339 /* If we had callee-copies statements, insert them at the beginning
6340 of the function. */
6341 if (parm_stmts)
6342 {
6343 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6344 BIND_EXPR_BODY (body) = parm_stmts;
6345 }
6346
6347 /* Unshare again, in case gimplification was sloppy. */
6348 unshare_all_trees (body);
6349
6350 *body_p = body;
6351
6352 pop_gimplify_context (body);
6353 gcc_assert (gimplify_ctxp == NULL);
6354
6355 #ifdef ENABLE_CHECKING
6356 walk_tree (body_p, check_pointer_types_r, NULL, NULL);
6357 #endif
6358
6359 timevar_pop (TV_TREE_GIMPLIFY);
6360 input_location = saved_location;
6361 }
6362
6363 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6364 node for the function we want to gimplify. */
6365
6366 void
6367 gimplify_function_tree (tree fndecl)
6368 {
6369 tree oldfn, parm, ret;
6370
6371 oldfn = current_function_decl;
6372 current_function_decl = fndecl;
6373 cfun = DECL_STRUCT_FUNCTION (fndecl);
6374 if (cfun == NULL)
6375 allocate_struct_function (fndecl);
6376
6377 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6378 {
6379 /* Preliminarily mark non-addressed complex variables as eligible
6380 for promotion to gimple registers. We'll transform their uses
6381 as we find them. */
6382 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6383 && !TREE_THIS_VOLATILE (parm)
6384 && !needs_to_live_in_memory (parm))
6385 DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
6386 }
6387
6388 ret = DECL_RESULT (fndecl);
6389 if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6390 && !needs_to_live_in_memory (ret))
6391 DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
6392
6393 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6394
6395 /* If we're instrumenting function entry/exit, then prepend the call to
6396 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6397 catch the exit hook. */
6398 /* ??? Add some way to ignore exceptions for this TFE. */
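  /* The resulting tree has, roughly, this shape (a sketch of what the
     statements below build):

         BIND_EXPR
           call to the PROFILE_FUNC_ENTER builtin
           TRY_FINALLY_EXPR
             <original DECL_SAVED_TREE>
           finally:
             call to the PROFILE_FUNC_EXIT builtin  */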
6399 if (flag_instrument_function_entry_exit
6400 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
6401 && !flag_instrument_functions_exclude_p (fndecl))
6402 {
6403 tree tf, x, bind;
6404
6405 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6406 TREE_SIDE_EFFECTS (tf) = 1;
6407 x = DECL_SAVED_TREE (fndecl);
6408 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6409 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6410 x = build_function_call_expr (x, NULL);
6411 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6412
6413 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6414 TREE_SIDE_EFFECTS (bind) = 1;
6415 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6416 x = build_function_call_expr (x, NULL);
6417 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6418 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6419
6420 DECL_SAVED_TREE (fndecl) = bind;
6421 }
6422
6423 current_function_decl = oldfn;
6424 cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
6425 }
6426
6427
6428 /* Expand EXPR into a list of gimple statements STMTS.  If SIMPLE is true,
6429 force the result to be either an SSA_NAME or an invariant; otherwise
6430 just force it to be a GIMPLE rhs expression.  If VAR is not NULL, make the
6431 base variable of the final destination be VAR if suitable.  */
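/* Usage sketch (the variable names are made up): a pass that needs BOUND
   as a gimple value and must emit its computation first might do

       tree stmts;
       bound = force_gimple_operand (bound, &stmts, true, NULL_TREE);
       if (stmts)
         bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);

   which is exactly what force_gimple_operand_bsi below packages up.  */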
6432
6433 tree
6434 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6435 {
6436 tree t;
6437 enum gimplify_status ret;
6438 gimple_predicate gimple_test_f;
6439
6440 *stmts = NULL_TREE;
6441
6442 if (is_gimple_val (expr))
6443 return expr;
6444
6445 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6446
6447 push_gimplify_context ();
6448 gimplify_ctxp->into_ssa = in_ssa_p;
6449
6450 if (var)
6451 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
6452
6453 ret = gimplify_expr (&expr, stmts, NULL,
6454 gimple_test_f, fb_rvalue);
6455 gcc_assert (ret != GS_ERROR);
6456
6457 if (referenced_vars)
6458 {
6459 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6460 add_referenced_var (t);
6461 }
6462
6463 pop_gimplify_context (NULL);
6464
6465 return expr;
6466 }
6467
6468 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
6469 some statements are produced, emits them before BSI. */
6470
6471 tree
6472 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6473 bool simple_p, tree var)
6474 {
6475 tree stmts;
6476
6477 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6478 if (stmts)
6479 bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
6480
6481 return expr;
6482 }
6483
6484 #include "gt-gimplify.h"
6485