xref: /freebsd-12.1/contrib/gcc/function.c (revision 5bfc7db4)
1 /* Expands front end tree to back end RTL for GCC.
2    Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3    1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA.  */
22 
23 /* $FreeBSD$ */
24 
25 /* This file handles the generation of rtl code from tree structure
26    at the level of the function as a whole.
27    It creates the rtl expressions for parameters and auto variables
28    and has full responsibility for allocating stack slots.
29 
30    `expand_function_start' is called at the beginning of a function,
31    before the function body is parsed, and `expand_function_end' is
32    called after parsing the body.
33 
34    Call `assign_stack_local' to allocate a stack slot for a local variable.
35    This is usually done during the RTL generation for the function body,
36    but it can also be done in the reload pass when a pseudo-register does
37    not get a hard register.  */
38 
39 #include "config.h"
40 #include "system.h"
41 #include "coretypes.h"
42 #include "tm.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "optabs.h"
50 #include "libfuncs.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "toplev.h"
58 #include "hashtab.h"
59 #include "ggc.h"
60 #include "tm_p.h"
61 #include "integrate.h"
62 #include "langhooks.h"
63 #include "target.h"
64 #include "cfglayout.h"
65 #include "tree-gimple.h"
66 #include "tree-pass.h"
67 #include "predict.h"
68 #include "vecprim.h"
69 
70 #ifndef LOCAL_ALIGNMENT
71 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
72 #endif
73 
74 #ifndef STACK_ALIGNMENT_NEEDED
75 #define STACK_ALIGNMENT_NEEDED 1
76 #endif
77 
78 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
79 
80 /* Some systems use __main in a way incompatible with its use in gcc; in these
81    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82    give the same symbol without quotes for an alternative entry point.  You
83    must define both, or neither.  */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #endif
87 
88 /* Round a value down to the nearest multiple of the required alignment.
89    Avoid using division in case the value is negative.  Assume the
90    alignment is a power of two.  */
91 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
92 
93 /* Similar, but round up to the next multiple of the required
94    alignment.  */
95 #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
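/* For example, with a 16-byte alignment, CEIL_ROUND (37, 16) == 48 and
   FLOOR_ROUND (-37, 16) == -48; the bit masking gives the same result as
   rounding up or down to a multiple of 16, without relying on division
   of a possibly negative value.  */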
96 
97 /* Nonzero if function being compiled doesn't contain any calls
98    (ignoring the prologue and epilogue).  This is set prior to
99    local register allocation and is valid for the remaining
100    compiler passes.  */
101 int current_function_is_leaf;
102 
103 /* Nonzero if function being compiled doesn't modify the stack pointer
104    (ignoring the prologue and epilogue).  This is only valid after
105    life_analysis has run.  */
106 int current_function_sp_is_unchanging;
107 
108 /* Nonzero if the function being compiled is a leaf function which only
109    uses leaf registers.  This is valid after reload (specifically after
110    sched2) and is useful only if the port defines LEAF_REGISTERS.  */
111 int current_function_uses_only_leaf_regs;
112 
113 /* Nonzero once virtual register instantiation has been done.
114    assign_stack_local uses frame_pointer_rtx when this is nonzero.
115    calls.c:emit_library_call_value_1 uses it to set up
116    post-instantiation libcalls.  */
117 int virtuals_instantiated;
118 
119 /* APPLE LOCAL begin radar 5732232 - blocks */
120 struct block_sema_info *cur_block;
121 /* APPLE LOCAL end radar 5732232 - blocks */
122 
123 /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
124 static GTY(()) int funcdef_no;
125 
126 /* These variables hold pointers to functions to create and destroy
127    target specific, per-function data structures.  */
128 struct machine_function * (*init_machine_status) (void);
129 
130 /* The currently compiled function.  */
131 struct function *cfun = 0;
132 
133 /* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
134 static VEC(int,heap) *prologue;
135 static VEC(int,heap) *epilogue;
136 
137 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
138    in this function.  */
139 static VEC(int,heap) *sibcall_epilogue;
140 
141 /* In order to evaluate some expressions, such as function calls returning
142    structures in memory, we need to temporarily allocate stack locations.
143    We record each allocated temporary in the following structure.
144 
145    Associated with each temporary slot is a nesting level.  When we pop up
146    one level, all temporaries associated with the previous level are freed.
147    Normally, all temporaries are freed after the execution of the statement
148    in which they were created.  However, if we are inside a ({...}) grouping,
149    the result may be in a temporary and hence must be preserved.  If the
150    result could be in a temporary, we preserve it if we can determine which
151    one it is in.  If we cannot determine which temporary may contain the
152    result, all temporaries are preserved.  A temporary is preserved by
153    pretending it was allocated at the previous nesting level.
154 
155    Automatic variables are also assigned temporary slots, at the nesting
156    level where they are defined.  They are marked as "kept" so that
157    free_temp_slots will not free them.  */
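/* For example, while expanding a ({...}) statement expression the
   expander calls push_temp_slots (), allocates temporaries at the new
   temp_slot_level, calls preserve_temp_slots () on the rtx holding the
   result so that its slot is moved to the enclosing level, and finally
   calls pop_temp_slots () to release every other slot of the inner
   level.  */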
158 
159 struct temp_slot GTY(())
160 {
161   /* Points to next temporary slot.  */
162   struct temp_slot *next;
163   /* Points to previous temporary slot.  */
164   struct temp_slot *prev;
165 
166   /* The rtx used to reference the slot.  */
167   rtx slot;
168   /* The rtx used to represent the address if not the address of the
169      slot above.  May be an EXPR_LIST if multiple addresses exist.  */
170   rtx address;
171   /* The alignment (in bits) of the slot.  */
172   unsigned int align;
173   /* The size, in units, of the slot.  */
174   HOST_WIDE_INT size;
175   /* The type of the object in the slot, or zero if it doesn't correspond
176      to a type.  We use this to determine whether a slot can be reused.
177      It can be reused if objects of the type of the new slot will always
178      conflict with objects of the type of the old slot.  */
179   tree type;
180   /* Nonzero if this temporary is currently in use.  */
181   char in_use;
182   /* Nonzero if this temporary has its address taken.  */
183   char addr_taken;
184   /* Nesting level at which this slot is being used.  */
185   int level;
186   /* Nonzero if this should survive a call to free_temp_slots.  */
187   int keep;
188   /* The offset of the slot from the frame_pointer, including extra space
189      for alignment.  This info is for combine_temp_slots.  */
190   HOST_WIDE_INT base_offset;
191   /* The size of the slot, including extra space for alignment.  This
192      info is for combine_temp_slots.  */
193   HOST_WIDE_INT full_size;
194 };
195 
196 /* Forward declarations.  */
197 
198 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
199 				 struct function *);
200 static struct temp_slot *find_temp_slot_from_address (rtx);
201 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
202 static void pad_below (struct args_size *, enum machine_mode, tree);
203 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
204 static int all_blocks (tree, tree *);
205 static tree *get_block_vector (tree, int *);
206 extern tree debug_find_var_in_block_tree (tree, tree);
207 /* We always define `record_insns' even if it's not used so that we
208    can always export `prologue_epilogue_contains'.  */
209 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
210 static int contains (rtx, VEC(int,heap) **);
211 #ifdef HAVE_return
212 static void emit_return_into_block (basic_block, rtx);
213 #endif
214 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
215 static rtx keep_stack_depressed (rtx);
216 #endif
217 static void prepare_function_start (tree);
218 static void do_clobber_return_reg (rtx, void *);
219 static void do_use_return_reg (rtx, void *);
220 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
221 /* APPLE LOCAL radar 6163705, Blocks prologues  */
222 static rtx find_block_prologue_insns (void);
223 
224 /* Pointer to chain of `struct function' for containing functions.  */
225 struct function *outer_function_chain;
226 
227 /* Given a function decl for a containing function,
228    return the `struct function' for it.  */
229 
230 struct function *
231 find_function_data (tree decl)
232 {
233   struct function *p;
234 
235   for (p = outer_function_chain; p; p = p->outer)
236     if (p->decl == decl)
237       return p;
238 
239   gcc_unreachable ();
240 }
241 
242 /* Save the current context for compilation of a nested function.
243    This is called from language-specific code.  The caller should use
244    the enter_nested langhook to save any language-specific state,
245    since this function knows only about language-independent
246    variables.  */
247 
248 void
249 push_function_context_to (tree context ATTRIBUTE_UNUSED)
250 {
251   struct function *p;
252 
253   if (cfun == 0)
254     init_dummy_function_start ();
255   p = cfun;
256 
257   p->outer = outer_function_chain;
258   outer_function_chain = p;
259 
260   lang_hooks.function.enter_nested (p);
261 
262   cfun = 0;
263 }
264 
265 void
266 push_function_context (void)
267 {
268   push_function_context_to (current_function_decl);
269 }
270 
271 /* Restore the last saved context, at the end of a nested function.
272    This function is called from language-specific code.  */
273 
274 void
275 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
276 {
277   struct function *p = outer_function_chain;
278 
279   cfun = p;
280   outer_function_chain = p->outer;
281 
282   current_function_decl = p->decl;
283 
284   lang_hooks.function.leave_nested (p);
285 
286   /* Reset variables that have known state during rtx generation.  */
287   virtuals_instantiated = 0;
288   generating_concat_p = 1;
289 }
290 
291 void
292 pop_function_context (void)
293 {
294   pop_function_context_from (current_function_decl);
295 }
296 
297 /* Clear out all parts of the state in F that can safely be discarded
298    after the function has been parsed, but not compiled, to let
299    garbage collection reclaim the memory.  */
300 
301 void
302 free_after_parsing (struct function *f)
303 {
304   /* f->expr->forced_labels is used by code generation.  */
305   /* f->emit->regno_reg_rtx is used by code generation.  */
306   /* f->varasm is used by code generation.  */
307   /* f->eh->eh_return_stub_label is used by code generation.  */
308 
309   lang_hooks.function.final (f);
310 }
311 
312 /* Clear out all parts of the state in F that can safely be discarded
313    after the function has been compiled, to let garbage collection
314    reclaim the memory.  */
315 
316 void
317 free_after_compilation (struct function *f)
318 {
319   VEC_free (int, heap, prologue);
320   VEC_free (int, heap, epilogue);
321   VEC_free (int, heap, sibcall_epilogue);
322 
323   f->eh = NULL;
324   f->expr = NULL;
325   f->emit = NULL;
326   f->varasm = NULL;
327   f->machine = NULL;
328   f->cfg = NULL;
329 
330   f->x_avail_temp_slots = NULL;
331   f->x_used_temp_slots = NULL;
332   f->arg_offset_rtx = NULL;
333   f->return_rtx = NULL;
334   f->internal_arg_pointer = NULL;
335   f->x_nonlocal_goto_handler_labels = NULL;
336   f->x_return_label = NULL;
337   f->x_naked_return_label = NULL;
338   f->x_stack_slot_list = NULL;
339   f->x_stack_check_probe_note = NULL;
340   f->x_arg_pointer_save_area = NULL;
341   f->x_parm_birth_insn = NULL;
342   f->epilogue_delay_list = NULL;
343 }
344 
345 /* Allocate fixed slots in the stack frame of the current function.  */
346 
347 /* Return size needed for stack frame based on slots so far allocated in
348    function F.
349    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
350    the caller may have to do that.  */
351 
352 static HOST_WIDE_INT
353 get_func_frame_size (struct function *f)
354 {
355   if (FRAME_GROWS_DOWNWARD)
356     return -f->x_frame_offset;
357   else
358     return f->x_frame_offset;
359 }
360 
361 /* Return size needed for stack frame based on slots so far allocated.
362    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
363    the caller may have to do that.  */
364 
365 HOST_WIDE_INT
366 get_frame_size (void)
367 {
368   return get_func_frame_size (cfun);
369 }
370 
371 /* Issue an error message and return TRUE if frame OFFSET overflows in
372    the signed target pointer arithmetic for function FUNC.  Otherwise
373    return FALSE.  */
374 
375 bool
376 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
377 {
378   unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
379 
380   if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
381 	       /* Leave room for the fixed part of the frame.  */
382 	       - 64 * UNITS_PER_WORD)
383     {
384       error ("%Jtotal size of local objects too large", func);
385       return TRUE;
386     }
387 
388   return FALSE;
389 }
390 
391 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
392    with machine mode MODE.
393 
394    ALIGN controls the amount of alignment for the address of the slot:
395    0 means according to MODE,
396    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
397    -2 means use BITS_PER_UNIT,
398    positive specifies alignment boundary in bits.
399 
400    We do not round to stack_boundary here.
401 
402    FUNCTION specifies the function to allocate in.  */
403 
404 static rtx
405 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
406 		      struct function *function)
407 {
408   rtx x, addr;
409   int bigend_correction = 0;
410   unsigned int alignment;
411   int frame_off, frame_alignment, frame_phase;
412 
413   if (align == 0)
414     {
415       tree type;
416 
417       if (mode == BLKmode)
418 	alignment = BIGGEST_ALIGNMENT;
419       else
420 	alignment = GET_MODE_ALIGNMENT (mode);
421 
422       /* Allow the target to (possibly) increase the alignment of this
423 	 stack slot.  */
424       type = lang_hooks.types.type_for_mode (mode, 0);
425       if (type)
426 	alignment = LOCAL_ALIGNMENT (type, alignment);
427 
428       alignment /= BITS_PER_UNIT;
429     }
430   else if (align == -1)
431     {
432       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
433       size = CEIL_ROUND (size, alignment);
434     }
435   else if (align == -2)
436     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
437   else
438     alignment = align / BITS_PER_UNIT;
439 
440   if (FRAME_GROWS_DOWNWARD)
441     function->x_frame_offset -= size;
442 
443   /* Ignore alignment requests beyond what the preferred stack boundary can provide.  */
444   if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
445     alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
446 
447   if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
448     function->stack_alignment_needed = alignment * BITS_PER_UNIT;
449 
450   /* Calculate how many bytes the start of local variables is off from
451      stack alignment.  */
452   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
453   frame_off = STARTING_FRAME_OFFSET % frame_alignment;
454   frame_phase = frame_off ? frame_alignment - frame_off : 0;
455 
456   /* Round the frame offset to the specified alignment.  The default is
457      to always honor requests to align the stack but a port may choose to
458      do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
459   if (STACK_ALIGNMENT_NEEDED
460       || mode != BLKmode
461       || size != 0)
462     {
463       /*  We must be careful here, since FRAME_OFFSET might be negative and
464 	  division with a negative dividend isn't as well defined as we might
465 	  like.  So we instead assume that ALIGNMENT is a power of two and
466 	  use logical operations which are unambiguous.  */
467       if (FRAME_GROWS_DOWNWARD)
468 	function->x_frame_offset
469 	  = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
470 			  (unsigned HOST_WIDE_INT) alignment)
471 	     + frame_phase);
472       else
473 	function->x_frame_offset
474 	  = (CEIL_ROUND (function->x_frame_offset - frame_phase,
475 			 (unsigned HOST_WIDE_INT) alignment)
476 	     + frame_phase);
477     }
478 
479   /* On a big-endian machine, if we are allocating more space than we will use,
480      use the least significant bytes of those that are allocated.  */
481   if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
482     bigend_correction = size - GET_MODE_SIZE (mode);
483 
484   /* If we have already instantiated virtual registers, return the actual
485      address relative to the frame pointer.  */
486   if (function == cfun && virtuals_instantiated)
487     addr = plus_constant (frame_pointer_rtx,
488 			  trunc_int_for_mode
489 			  (frame_offset + bigend_correction
490 			   + STARTING_FRAME_OFFSET, Pmode));
491   else
492     addr = plus_constant (virtual_stack_vars_rtx,
493 			  trunc_int_for_mode
494 			  (function->x_frame_offset + bigend_correction,
495 			   Pmode));
496 
497   if (!FRAME_GROWS_DOWNWARD)
498     function->x_frame_offset += size;
499 
500   x = gen_rtx_MEM (mode, addr);
501   MEM_NOTRAP_P (x) = 1;
502 
503   function->x_stack_slot_list
504     = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
505 
506   if (frame_offset_overflow (function->x_frame_offset, function->decl))
507     function->x_frame_offset = 0;
508 
509   return x;
510 }
511 
512 /* Wrapper around assign_stack_local_1;  assign a local stack slot for the
513    current function.  */
514 
515 rtx
516 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
517 {
518   return assign_stack_local_1 (mode, size, align, cfun);
519 }
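/* For example, assign_stack_local (SImode, 4, 0) reserves a 4-byte slot
   aligned as required by SImode, while assign_stack_local (BLKmode, size, -1)
   rounds SIZE up to a multiple of BIGGEST_ALIGNMENT and aligns the slot to
   that boundary; in both cases the returned MEM is addressed through
   virtual_stack_vars_rtx until the vregs pass has run.  */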
520 
521 
522 /* Removes temporary slot TEMP from LIST.  */
523 
524 static void
525 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
526 {
527   if (temp->next)
528     temp->next->prev = temp->prev;
529   if (temp->prev)
530     temp->prev->next = temp->next;
531   else
532     *list = temp->next;
533 
534   temp->prev = temp->next = NULL;
535 }
536 
537 /* Inserts temporary slot TEMP to LIST.  */
538 
539 static void
540 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
541 {
542   temp->next = *list;
543   if (*list)
544     (*list)->prev = temp;
545   temp->prev = NULL;
546   *list = temp;
547 }
548 
549 /* Returns the list of used temp slots at LEVEL.  */
550 
551 static struct temp_slot **
552 temp_slots_at_level (int level)
553 {
554   if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
555     {
556       size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
557       temp_slot_p *p;
558 
559       VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
560       p = VEC_address (temp_slot_p, used_temp_slots);
561       memset (&p[old_length], 0,
562 	      sizeof (temp_slot_p) * (level + 1 - old_length));
563     }
564 
565   return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
566 }
567 
568 /* Returns the maximal temporary slot level.  */
569 
570 static int
571 max_slot_level (void)
572 {
573   if (!used_temp_slots)
574     return -1;
575 
576   return VEC_length (temp_slot_p, used_temp_slots) - 1;
577 }
578 
579 /* Moves temporary slot TEMP to LEVEL.  */
580 
581 static void
582 move_slot_to_level (struct temp_slot *temp, int level)
583 {
584   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
585   insert_slot_to_list (temp, temp_slots_at_level (level));
586   temp->level = level;
587 }
588 
589 /* Make temporary slot TEMP available.  */
590 
591 static void
592 make_slot_available (struct temp_slot *temp)
593 {
594   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
595   insert_slot_to_list (temp, &avail_temp_slots);
596   temp->in_use = 0;
597   temp->level = -1;
598 }
599 
600 /* Allocate a temporary stack slot and record it for possible later
601    reuse.
602 
603    MODE is the machine mode to be given to the returned rtx.
604 
605    SIZE is the size in units of the space required.  We do no rounding here
606    since assign_stack_local will do any required rounding.
607 
608    KEEP is 1 if this slot is to be retained after a call to
609    free_temp_slots.  Automatic variables for a block are allocated
610    with this flag.  KEEP values of 2 or 3 were needed respectively
611    for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
612    or for SAVE_EXPRs, but they are now unused.
613 
614    TYPE is the type that will be used for the stack slot.  */
615 
616 rtx
617 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
618 			    int keep, tree type)
619 {
620   unsigned int align;
621   struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
622   rtx slot;
623 
624   /* If SIZE is -1 it means that somebody tried to allocate a temporary
625      of a variable size.  */
626   gcc_assert (size != -1);
627 
628   /* These are now unused.  */
629   gcc_assert (keep <= 1);
630 
631   if (mode == BLKmode)
632     align = BIGGEST_ALIGNMENT;
633   else
634     align = GET_MODE_ALIGNMENT (mode);
635 
636   if (! type)
637     type = lang_hooks.types.type_for_mode (mode, 0);
638 
639   if (type)
640     align = LOCAL_ALIGNMENT (type, align);
641 
642   /* Try to find an available, already-allocated temporary of the proper
643      mode which meets the size and alignment requirements.  Choose the
644      smallest one with the closest alignment.
645 
646      If assign_stack_temp is called outside of the tree->rtl expansion,
647      we cannot reuse the stack slots (that may still refer to
648      VIRTUAL_STACK_VARS_REGNUM).  */
649   if (!virtuals_instantiated)
650     {
651       for (p = avail_temp_slots; p; p = p->next)
652 	{
653 	  if (p->align >= align && p->size >= size
654 	      && GET_MODE (p->slot) == mode
655 	      && objects_must_conflict_p (p->type, type)
656 	      && (best_p == 0 || best_p->size > p->size
657 		  || (best_p->size == p->size && best_p->align > p->align)))
658 	    {
659 	      if (p->align == align && p->size == size)
660 		{
661 		  selected = p;
662 		  cut_slot_from_list (selected, &avail_temp_slots);
663 		  best_p = 0;
664 		  break;
665 		}
666 	      best_p = p;
667 	    }
668 	}
669     }
670 
671   /* Make our best, if any, the one to use.  */
672   if (best_p)
673     {
674       selected = best_p;
675       cut_slot_from_list (selected, &avail_temp_slots);
676 
677       /* If there are enough aligned bytes left over, make them into a new
678 	 temp_slot so that the extra bytes don't get wasted.  Do this only
679 	 for BLKmode slots, so that we can be sure of the alignment.  */
680       if (GET_MODE (best_p->slot) == BLKmode)
681 	{
682 	  int alignment = best_p->align / BITS_PER_UNIT;
683 	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
684 
685 	  if (best_p->size - rounded_size >= alignment)
686 	    {
687 	      p = ggc_alloc (sizeof (struct temp_slot));
688 	      p->in_use = p->addr_taken = 0;
689 	      p->size = best_p->size - rounded_size;
690 	      p->base_offset = best_p->base_offset + rounded_size;
691 	      p->full_size = best_p->full_size - rounded_size;
692 	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
693 	      p->align = best_p->align;
694 	      p->address = 0;
695 	      p->type = best_p->type;
696 	      insert_slot_to_list (p, &avail_temp_slots);
697 
698 	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
699 						   stack_slot_list);
700 
701 	      best_p->size = rounded_size;
702 	      best_p->full_size = rounded_size;
703 	    }
704 	}
705     }
706 
707   /* If we still didn't find one, make a new temporary.  */
708   if (selected == 0)
709     {
710       HOST_WIDE_INT frame_offset_old = frame_offset;
711 
712       p = ggc_alloc (sizeof (struct temp_slot));
713 
714       /* We are passing an explicit alignment request to assign_stack_local.
715 	 One side effect of that is assign_stack_local will not round SIZE
716 	 to ensure the frame offset remains suitably aligned.
717 
718 	 So for requests which depended on the rounding of SIZE, we go ahead
719 	 and round it now.  We also make sure ALIGNMENT is at least
720 	 BIGGEST_ALIGNMENT.  */
721       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
722       p->slot = assign_stack_local (mode,
723 				    (mode == BLKmode
724 				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
725 				     : size),
726 				    align);
727 
728       p->align = align;
729 
730       /* The following slot size computation is necessary because we don't
731 	 know the actual size of the temporary slot until assign_stack_local
732 	 has performed all the frame alignment and size rounding for the
733 	 requested temporary.  Note that extra space added for alignment
734 	 can be either above or below this stack slot depending on which
735 	 way the frame grows.  We include the extra space if and only if it
736 	 is above this slot.  */
737       if (FRAME_GROWS_DOWNWARD)
738 	p->size = frame_offset_old - frame_offset;
739       else
740 	p->size = size;
741 
742       /* Now define the fields used by combine_temp_slots.  */
743       if (FRAME_GROWS_DOWNWARD)
744 	{
745 	  p->base_offset = frame_offset;
746 	  p->full_size = frame_offset_old - frame_offset;
747 	}
748       else
749 	{
750 	  p->base_offset = frame_offset_old;
751 	  p->full_size = frame_offset - frame_offset_old;
752 	}
753       p->address = 0;
754 
755       selected = p;
756     }
757 
758   p = selected;
759   p->in_use = 1;
760   p->addr_taken = 0;
761   p->type = type;
762   p->level = temp_slot_level;
763   p->keep = keep;
764 
765   pp = temp_slots_at_level (p->level);
766   insert_slot_to_list (p, pp);
767 
768   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
769   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
770   stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
771 
772   /* If we know the alias set for the memory that will be used, use
773      it.  If there's no TYPE, then we don't know anything about the
774      alias set for the memory.  */
775   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
776   set_mem_align (slot, align);
777 
778   /* If a type is specified, set the relevant flags.  */
779   if (type != 0)
780     {
781       MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
782       MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
783     }
784   MEM_NOTRAP_P (slot) = 1;
785 
786   return slot;
787 }
788 
789 /* Allocate a temporary stack slot and record it for possible later
790    reuse.  The first three arguments are the same as in the preceding function.  */
791 
792 rtx
793 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
794 {
795   return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
796 }
797 
798 /* Assign a temporary.
799    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
800    and its name should be used in error messages.  In either case, we
801    allocate a temporary of the given type.
802    KEEP is as for assign_stack_temp.
803    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
804    it is 0 if a register is OK.
805    DONT_PROMOTE is 1 if we should not promote values in register
806    to wider modes.  */
807 
808 rtx
809 assign_temp (tree type_or_decl, int keep, int memory_required,
810 	     int dont_promote ATTRIBUTE_UNUSED)
811 {
812   tree type, decl;
813   enum machine_mode mode;
814 #ifdef PROMOTE_MODE
815   int unsignedp;
816 #endif
817 
818   if (DECL_P (type_or_decl))
819     decl = type_or_decl, type = TREE_TYPE (decl);
820   else
821     decl = NULL, type = type_or_decl;
822 
823   mode = TYPE_MODE (type);
824 #ifdef PROMOTE_MODE
825   unsignedp = TYPE_UNSIGNED (type);
826 #endif
827 
828   if (mode == BLKmode || memory_required)
829     {
830       HOST_WIDE_INT size = int_size_in_bytes (type);
831       rtx tmp;
832 
833       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
834 	 problems with allocating the stack space.  */
835       if (size == 0)
836 	size = 1;
837 
838       /* Unfortunately, we don't yet know how to allocate variable-sized
839 	 temporaries.  However, sometimes we can find a fixed upper limit on
840 	 the size, so try that instead.  */
841       else if (size == -1)
842 	size = max_int_size_in_bytes (type);
843 
844       /* The size of the temporary may be too large to fit into an integer.  */
845       /* ??? Not sure this should happen except for user silliness, so limit
846 	 this to things that aren't compiler-generated temporaries.  The
847 	 rest of the time we'll die in assign_stack_temp_for_type.  */
848       if (decl && size == -1
849 	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
850 	{
851 	  error ("size of variable %q+D is too large", decl);
852 	  size = 1;
853 	}
854 
855       tmp = assign_stack_temp_for_type (mode, size, keep, type);
856       return tmp;
857     }
858 
859 #ifdef PROMOTE_MODE
860   if (! dont_promote)
861     mode = promote_mode (type, mode, &unsignedp, 0);
862 #endif
863 
864   return gen_reg_rtx (mode);
865 }
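/* For example, assign_temp (ptr_type_node, 0, 1, 0) returns addressable
   stack memory for a pointer-sized temporary, whereas with
   MEMORY_REQUIRED == 0 the same call would normally yield a fresh pseudo
   register (possibly widened by PROMOTE_MODE).  */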
866 
867 /* Combine temporary stack slots which are adjacent on the stack.
868 
869    This allows for better use of already allocated stack space.  This is only
870    done for BLKmode slots because we can be sure that we won't have alignment
871    problems in this case.  */
872 
873 static void
874 combine_temp_slots (void)
875 {
876   struct temp_slot *p, *q, *next, *next_q;
877   int num_slots;
878 
879   /* We can't combine slots, because the information about which slot
880      is in which alias set will be lost.  */
881   if (flag_strict_aliasing)
882     return;
883 
884   /* If there are a lot of temp slots, don't do anything unless
885      we are optimizing heavily.  */
886   if (! flag_expensive_optimizations)
887     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
888       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
889 	return;
890 
891   for (p = avail_temp_slots; p; p = next)
892     {
893       int delete_p = 0;
894 
895       next = p->next;
896 
897       if (GET_MODE (p->slot) != BLKmode)
898 	continue;
899 
900       for (q = p->next; q; q = next_q)
901 	{
902        	  int delete_q = 0;
903 
904 	  next_q = q->next;
905 
906 	  if (GET_MODE (q->slot) != BLKmode)
907 	    continue;
908 
909 	  if (p->base_offset + p->full_size == q->base_offset)
910 	    {
911 	      /* Q comes after P; combine Q into P.  */
912 	      p->size += q->size;
913 	      p->full_size += q->full_size;
914 	      delete_q = 1;
915 	    }
916 	  else if (q->base_offset + q->full_size == p->base_offset)
917 	    {
918 	      /* P comes after Q; combine P into Q.  */
919 	      q->size += p->size;
920 	      q->full_size += p->full_size;
921 	      delete_p = 1;
922 	      break;
923 	    }
924 	  if (delete_q)
925 	    cut_slot_from_list (q, &avail_temp_slots);
926 	}
927 
928       /* Either delete P or advance past it.  */
929       if (delete_p)
930 	cut_slot_from_list (p, &avail_temp_slots);
931     }
932 }
933 
934 /* Find the temp slot corresponding to the object at address X.  */
935 
936 static struct temp_slot *
937 find_temp_slot_from_address (rtx x)
938 {
939   struct temp_slot *p;
940   rtx next;
941   int i;
942 
943   for (i = max_slot_level (); i >= 0; i--)
944     for (p = *temp_slots_at_level (i); p; p = p->next)
945       {
946 	if (XEXP (p->slot, 0) == x
947 	    || p->address == x
948 	    || (GET_CODE (x) == PLUS
949 		&& XEXP (x, 0) == virtual_stack_vars_rtx
950 		&& GET_CODE (XEXP (x, 1)) == CONST_INT
951 		&& INTVAL (XEXP (x, 1)) >= p->base_offset
952 		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
953 	  return p;
954 
955 	else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
956 	  for (next = p->address; next; next = XEXP (next, 1))
957 	    if (XEXP (next, 0) == x)
958 	      return p;
959       }
960 
961   /* If we have a sum involving a register, see if it points to a temp
962      slot.  */
963   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
964       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
965     return p;
966   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
967 	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
968     return p;
969 
970   return 0;
971 }
972 
973 /* Indicate that NEW is an alternate way of referring to the temp slot
974    that previously was known by OLD.  */
975 
976 void
977 update_temp_slot_address (rtx old, rtx new)
978 {
979   struct temp_slot *p;
980 
981   if (rtx_equal_p (old, new))
982     return;
983 
984   p = find_temp_slot_from_address (old);
985 
986   /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
987      is a register, see if one operand of the PLUS is a temporary
988      location.  If so, NEW points into it.  Otherwise, see if both
989      OLD and NEW are a PLUS with a register in common between them;
990      if so, try a recursive call on those values.  */
991   if (p == 0)
992     {
993       if (GET_CODE (old) != PLUS)
994 	return;
995 
996       if (REG_P (new))
997 	{
998 	  update_temp_slot_address (XEXP (old, 0), new);
999 	  update_temp_slot_address (XEXP (old, 1), new);
1000 	  return;
1001 	}
1002       else if (GET_CODE (new) != PLUS)
1003 	return;
1004 
1005       if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1006 	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1007       else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1008 	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1009       else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1010 	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1011       else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1012 	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1013 
1014       return;
1015     }
1016 
1017   /* Otherwise add an alias for the temp's address.  */
1018   else if (p->address == 0)
1019     p->address = new;
1020   else
1021     {
1022       if (GET_CODE (p->address) != EXPR_LIST)
1023 	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1024 
1025       p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1026     }
1027 }
1028 
1029 /* If X could be a reference to a temporary slot, mark the fact that its
1030    address was taken.  */
1031 
1032 void
1033 mark_temp_addr_taken (rtx x)
1034 {
1035   struct temp_slot *p;
1036 
1037   if (x == 0)
1038     return;
1039 
1040   /* If X is not in memory or is at a constant address, it cannot be in
1041      a temporary slot.  */
1042   if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1043     return;
1044 
1045   p = find_temp_slot_from_address (XEXP (x, 0));
1046   if (p != 0)
1047     p->addr_taken = 1;
1048 }
1049 
1050 /* If X could be a reference to a temporary slot, mark that slot as
1051    belonging to the level one higher than the current level.  If X
1052    matched one of our slots, just mark that one.  Otherwise, we can't
1053    easily predict which it is, so upgrade all of them.  Kept slots
1054    need not be touched.
1055 
1056    This is called when an ({...}) construct occurs and a statement
1057    returns a value in memory.  */
1058 
1059 void
1060 preserve_temp_slots (rtx x)
1061 {
1062   struct temp_slot *p = 0, *next;
1063 
1064   /* If there is no result, we still might have some objects whose address
1065      was taken, so we need to make sure they stay around.  */
1066   if (x == 0)
1067     {
1068       for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1069 	{
1070 	  next = p->next;
1071 
1072 	  if (p->addr_taken)
1073 	    move_slot_to_level (p, temp_slot_level - 1);
1074 	}
1075 
1076       return;
1077     }
1078 
1079   /* If X is a register that is being used as a pointer, see if we have
1080      a temporary slot we know it points to.  To be consistent with
1081      the code below, we really should preserve all non-kept slots
1082      if we can't find a match, but that seems to be much too costly.  */
1083   if (REG_P (x) && REG_POINTER (x))
1084     p = find_temp_slot_from_address (x);
1085 
1086   /* If X is not in memory or is at a constant address, it cannot be in
1087      a temporary slot, but it can contain something whose address was
1088      taken.  */
1089   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1090     {
1091       for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1092 	{
1093 	  next = p->next;
1094 
1095 	  if (p->addr_taken)
1096 	    move_slot_to_level (p, temp_slot_level - 1);
1097 	}
1098 
1099       return;
1100     }
1101 
1102   /* First see if we can find a match.  */
1103   if (p == 0)
1104     p = find_temp_slot_from_address (XEXP (x, 0));
1105 
1106   if (p != 0)
1107     {
1108       /* Move everything at our level whose address was taken to our new
1109 	 level in case we used its address.  */
1110       struct temp_slot *q;
1111 
1112       if (p->level == temp_slot_level)
1113 	{
1114 	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1115 	    {
1116 	      next = q->next;
1117 
1118 	      if (p != q && q->addr_taken)
1119 		move_slot_to_level (q, temp_slot_level - 1);
1120 	    }
1121 
1122 	  move_slot_to_level (p, temp_slot_level - 1);
1123 	  p->addr_taken = 0;
1124 	}
1125       return;
1126     }
1127 
1128   /* Otherwise, preserve all non-kept slots at this level.  */
1129   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1130     {
1131       next = p->next;
1132 
1133       if (!p->keep)
1134 	move_slot_to_level (p, temp_slot_level - 1);
1135     }
1136 }
1137 
1138 /* Free all temporaries used so far.  This is normally called at the
1139    end of generating code for a statement.  */
1140 
1141 void
1142 free_temp_slots (void)
1143 {
1144   struct temp_slot *p, *next;
1145 
1146   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1147     {
1148       next = p->next;
1149 
1150       if (!p->keep)
1151 	make_slot_available (p);
1152     }
1153 
1154   combine_temp_slots ();
1155 }
1156 
1157 /* Push deeper into the nesting level for stack temporaries.  */
1158 
1159 void
1160 push_temp_slots (void)
1161 {
1162   temp_slot_level++;
1163 }
1164 
1165 /* Pop a temporary nesting level.  All slots in use in the current level
1166    are freed.  */
1167 
1168 void
1169 pop_temp_slots (void)
1170 {
1171   struct temp_slot *p, *next;
1172 
1173   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1174     {
1175       next = p->next;
1176       make_slot_available (p);
1177     }
1178 
1179   combine_temp_slots ();
1180 
1181   temp_slot_level--;
1182 }
1183 
1184 /* Initialize temporary slots.  */
1185 
1186 void
1187 init_temp_slots (void)
1188 {
1189   /* We have not allocated any temporaries yet.  */
1190   avail_temp_slots = 0;
1191   used_temp_slots = 0;
1192   temp_slot_level = 0;
1193 }
1194 
1195 /* These routines are responsible for converting virtual register references
1196    to the actual hard register references once RTL generation is complete.
1197 
1198    The following five variables are used for communication between the
1199    routines.  They contain the offsets of the virtual registers from their
1200    respective hard registers.  */
1201 
1202 static int in_arg_offset;
1203 static int var_offset;
1204 static int dynamic_offset;
1205 static int out_arg_offset;
1206 static int cfa_offset;
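/* For example, once instantiate_virtual_regs has computed these offsets,
   virtual_stack_vars_rtx is rewritten as frame_pointer_rtx plus
   var_offset (STARTING_FRAME_OFFSET) and virtual_outgoing_args_rtx as
   stack_pointer_rtx plus out_arg_offset (STACK_POINTER_OFFSET); see
   instantiate_new_reg below.  */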
1207 
1208 /* In most machines, the stack pointer register is equivalent to the bottom
1209    of the stack.  */
1210 
1211 #ifndef STACK_POINTER_OFFSET
1212 #define STACK_POINTER_OFFSET	0
1213 #endif
1214 
1215 /* If not defined, pick an appropriate default for the offset of dynamically
1216    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1217    REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1218 
1219 #ifndef STACK_DYNAMIC_OFFSET
1220 
1221 /* The bottom of the stack points to the actual arguments.  If
1222    REG_PARM_STACK_SPACE is defined, this includes the space for the register
1223    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1224    stack space for register parameters is not pushed by the caller, but
1225    rather is part of the fixed stack areas and hence not included in
1226    `current_function_outgoing_args_size'.  Nevertheless, we must allow
1227    for it when allocating stack dynamic objects.  */
1228 
1229 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1230 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1231 ((ACCUMULATE_OUTGOING_ARGS						      \
1232   ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1233  + (STACK_POINTER_OFFSET))						      \
1234 
1235 #else
1236 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
1237 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
1238  + (STACK_POINTER_OFFSET))
1239 #endif
1240 #endif
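/* For example, on a target that accumulates outgoing arguments and does
   not define REG_PARM_STACK_SPACE, dynamically allocated stack memory
   starts current_function_outgoing_args_size + STACK_POINTER_OFFSET
   bytes beyond the stack pointer.  */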
1241 
1242 
1243 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1244    is a virtual register, return the equivalent hard register and set the
1245    offset indirectly through the pointer.  Otherwise, return 0.  */
1246 
1247 static rtx
1248 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1249 {
1250   rtx new;
1251   HOST_WIDE_INT offset;
1252 
1253   if (x == virtual_incoming_args_rtx)
1254     new = arg_pointer_rtx, offset = in_arg_offset;
1255   else if (x == virtual_stack_vars_rtx)
1256     new = frame_pointer_rtx, offset = var_offset;
1257   else if (x == virtual_stack_dynamic_rtx)
1258     new = stack_pointer_rtx, offset = dynamic_offset;
1259   else if (x == virtual_outgoing_args_rtx)
1260     new = stack_pointer_rtx, offset = out_arg_offset;
1261   else if (x == virtual_cfa_rtx)
1262     {
1263 #ifdef FRAME_POINTER_CFA_OFFSET
1264       new = frame_pointer_rtx;
1265 #else
1266       new = arg_pointer_rtx;
1267 #endif
1268       offset = cfa_offset;
1269     }
1270   else
1271     return NULL_RTX;
1272 
1273   *poffset = offset;
1274   return new;
1275 }
1276 
1277 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1278    Instantiate any virtual registers present inside of *LOC.  The expression
1279    is simplified, as much as possible, but is not to be considered "valid"
1280    in any sense implied by the target.  If any change is made, set CHANGED
1281    to true.  */
1282 
1283 static int
1284 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1285 {
1286   HOST_WIDE_INT offset;
1287   bool *changed = (bool *) data;
1288   rtx x, new;
1289 
1290   x = *loc;
1291   if (x == 0)
1292     return 0;
1293 
1294   switch (GET_CODE (x))
1295     {
1296     case REG:
1297       new = instantiate_new_reg (x, &offset);
1298       if (new)
1299 	{
1300 	  *loc = plus_constant (new, offset);
1301 	  if (changed)
1302 	    *changed = true;
1303 	}
1304       return -1;
1305 
1306     case PLUS:
1307       new = instantiate_new_reg (XEXP (x, 0), &offset);
1308       if (new)
1309 	{
1310 	  new = plus_constant (new, offset);
1311 	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1312 	  if (changed)
1313 	    *changed = true;
1314 	  return -1;
1315 	}
1316 
1317       /* FIXME -- from old code */
1318 	  /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1319 	     we can commute the PLUS and SUBREG because pointers into the
1320 	     frame are well-behaved.  */
1321       break;
1322 
1323     default:
1324       break;
1325     }
1326 
1327   return 0;
1328 }
1329 
1330 /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1331    matches the predicate for insn CODE operand OPERAND.  */
1332 
1333 static int
1334 safe_insn_predicate (int code, int operand, rtx x)
1335 {
1336   const struct insn_operand_data *op_data;
1337 
1338   if (code < 0)
1339     return true;
1340 
1341   op_data = &insn_data[code].operand[operand];
1342   if (op_data->predicate == NULL)
1343     return true;
1344 
1345   return op_data->predicate (x, op_data->mode);
1346 }
1347 
1348 /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1349    registers present inside of insn.  The result will be a valid insn.  */
1350 
1351 static void
1352 instantiate_virtual_regs_in_insn (rtx insn)
1353 {
1354   HOST_WIDE_INT offset;
1355   int insn_code, i;
1356   bool any_change = false;
1357   rtx set, new, x, seq;
1358 
1359   /* There are some special cases to be handled first.  */
1360   set = single_set (insn);
1361   if (set)
1362     {
1363       /* We're allowed to assign to a virtual register.  This is interpreted
1364 	 to mean that the underlying register gets assigned the inverse
1365 	 transformation.  This is used, for example, in the handling of
1366 	 non-local gotos.  */
1367       new = instantiate_new_reg (SET_DEST (set), &offset);
1368       if (new)
1369 	{
1370 	  start_sequence ();
1371 
1372 	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1373 	  x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1374 				   GEN_INT (-offset));
1375 	  x = force_operand (x, new);
1376 	  if (x != new)
1377 	    emit_move_insn (new, x);
1378 
1379 	  seq = get_insns ();
1380 	  end_sequence ();
1381 
1382 	  emit_insn_before (seq, insn);
1383 	  delete_insn (insn);
1384 	  return;
1385 	}
1386 
1387       /* Handle a straight copy from a virtual register by generating a
1388 	 new add insn.  The difference between this and falling through
1389 	 to the generic case is avoiding a new pseudo and eliminating a
1390 	 move insn in the initial rtl stream.  */
1391       new = instantiate_new_reg (SET_SRC (set), &offset);
1392       if (new && offset != 0
1393 	  && REG_P (SET_DEST (set))
1394 	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1395 	{
1396 	  start_sequence ();
1397 
1398 	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1399 				   new, GEN_INT (offset), SET_DEST (set),
1400 				   1, OPTAB_LIB_WIDEN);
1401 	  if (x != SET_DEST (set))
1402 	    emit_move_insn (SET_DEST (set), x);
1403 
1404 	  seq = get_insns ();
1405 	  end_sequence ();
1406 
1407 	  emit_insn_before (seq, insn);
1408 	  delete_insn (insn);
1409 	  return;
1410 	}
1411 
1412       extract_insn (insn);
1413       insn_code = INSN_CODE (insn);
1414 
1415       /* Handle a plus involving a virtual register by determining if the
1416 	 operands remain valid if they're modified in place.  */
1417       if (GET_CODE (SET_SRC (set)) == PLUS
1418 	  && recog_data.n_operands >= 3
1419 	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1420 	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1421 	  && GET_CODE (recog_data.operand[2]) == CONST_INT
1422 	  && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1423 	{
1424 	  offset += INTVAL (recog_data.operand[2]);
1425 
1426 	  /* If the sum is zero, then replace with a plain move.  */
1427 	  if (offset == 0
1428 	      && REG_P (SET_DEST (set))
1429 	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1430 	    {
1431 	      start_sequence ();
1432 	      emit_move_insn (SET_DEST (set), new);
1433 	      seq = get_insns ();
1434 	      end_sequence ();
1435 
1436 	      emit_insn_before (seq, insn);
1437 	      delete_insn (insn);
1438 	      return;
1439 	    }
1440 
1441 	  x = gen_int_mode (offset, recog_data.operand_mode[2]);
1442 
1443 	  /* Using validate_change and apply_change_group here leaves
1444 	     recog_data in an invalid state.  Since we know exactly what
1445 	     we want to check, do those two by hand.  */
1446 	  if (safe_insn_predicate (insn_code, 1, new)
1447 	      && safe_insn_predicate (insn_code, 2, x))
1448 	    {
1449 	      *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1450 	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1451 	      any_change = true;
1452 
1453 	      /* Fall through into the regular operand fixup loop in
1454 		 order to take care of operands other than 1 and 2.  */
1455 	    }
1456 	}
1457     }
1458   else
1459     {
1460       extract_insn (insn);
1461       insn_code = INSN_CODE (insn);
1462     }
1463 
1464   /* In the general case, we expect virtual registers to appear only in
1465      operands, and then only as either bare registers or inside memories.  */
1466   for (i = 0; i < recog_data.n_operands; ++i)
1467     {
1468       x = recog_data.operand[i];
1469       switch (GET_CODE (x))
1470 	{
1471 	case MEM:
1472 	  {
1473 	    rtx addr = XEXP (x, 0);
1474 	    bool changed = false;
1475 
1476 	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1477 	    if (!changed)
1478 	      continue;
1479 
1480 	    start_sequence ();
1481 	    x = replace_equiv_address (x, addr);
1482 	    seq = get_insns ();
1483 	    end_sequence ();
1484 	    if (seq)
1485 	      emit_insn_before (seq, insn);
1486 	  }
1487 	  break;
1488 
1489 	case REG:
1490 	  new = instantiate_new_reg (x, &offset);
1491 	  if (new == NULL)
1492 	    continue;
1493 	  if (offset == 0)
1494 	    x = new;
1495 	  else
1496 	    {
1497 	      start_sequence ();
1498 
1499 	      /* Careful, special mode predicates may have stuff in
1500 		 insn_data[insn_code].operand[i].mode that isn't useful
1501 		 to us for computing a new value.  */
1502 	      /* ??? Recognize address_operand and/or "p" constraints
1503 		 to see if (plus new offset) is a valid before we put
1504 		 to see if (plus new offset) is valid before we put
1505 	      x = expand_simple_binop (GET_MODE (x), PLUS, new,
1506 				       GEN_INT (offset), NULL_RTX,
1507 				       1, OPTAB_LIB_WIDEN);
1508 	      seq = get_insns ();
1509 	      end_sequence ();
1510 	      emit_insn_before (seq, insn);
1511 	    }
1512 	  break;
1513 
1514 	case SUBREG:
1515 	  new = instantiate_new_reg (SUBREG_REG (x), &offset);
1516 	  if (new == NULL)
1517 	    continue;
1518 	  if (offset != 0)
1519 	    {
1520 	      start_sequence ();
1521 	      new = expand_simple_binop (GET_MODE (new), PLUS, new,
1522 					 GEN_INT (offset), NULL_RTX,
1523 					 1, OPTAB_LIB_WIDEN);
1524 	      seq = get_insns ();
1525 	      end_sequence ();
1526 	      emit_insn_before (seq, insn);
1527 	    }
1528 	  x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1529 				   GET_MODE (new), SUBREG_BYTE (x));
1530 	  break;
1531 
1532 	default:
1533 	  continue;
1534 	}
1535 
1536       /* At this point, X contains the new value for the operand.
1537 	 Validate the new value vs the insn predicate.  Note that
1538 	 asm insns will have insn_code -1 here.  */
1539       if (!safe_insn_predicate (insn_code, i, x))
1540 	{
1541 	  start_sequence ();
1542 	  x = force_reg (insn_data[insn_code].operand[i].mode, x);
1543 	  seq = get_insns ();
1544 	  end_sequence ();
1545 	  if (seq)
1546 	    emit_insn_before (seq, insn);
1547 	}
1548 
1549       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1550       any_change = true;
1551     }
1552 
1553   if (any_change)
1554     {
1555       /* Propagate operand changes into the duplicates.  */
1556       for (i = 0; i < recog_data.n_dups; ++i)
1557 	*recog_data.dup_loc[i]
1558 	  = recog_data.operand[(unsigned)recog_data.dup_num[i]];
1559 
1560       /* Force re-recognition of the instruction for validation.  */
1561       INSN_CODE (insn) = -1;
1562     }
1563 
1564   if (asm_noperands (PATTERN (insn)) >= 0)
1565     {
1566       if (!check_asm_operands (PATTERN (insn)))
1567 	{
1568 	  error_for_asm (insn, "impossible constraint in %<asm%>");
1569 	  delete_insn (insn);
1570 	}
1571     }
1572   else
1573     {
1574       if (recog_memoized (insn) < 0)
1575 	fatal_insn_not_found (insn);
1576     }
1577 }
1578 
1579 /* Subroutine of instantiate_decls.  Given RTL representing a decl,
1580    do any instantiation required.  */
1581 
1582 static void
1583 instantiate_decl (rtx x)
1584 {
1585   rtx addr;
1586 
1587   if (x == 0)
1588     return;
1589 
1590   /* If this is a CONCAT, recurse for the pieces.  */
1591   if (GET_CODE (x) == CONCAT)
1592     {
1593       instantiate_decl (XEXP (x, 0));
1594       instantiate_decl (XEXP (x, 1));
1595       return;
1596     }
1597 
1598   /* If this is not a MEM, no need to do anything.  Similarly if the
1599      address is a constant or a register that is not a virtual register.  */
1600   if (!MEM_P (x))
1601     return;
1602 
1603   addr = XEXP (x, 0);
1604   if (CONSTANT_P (addr)
1605       || (REG_P (addr)
1606 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1607 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1608     return;
1609 
1610   for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1611 }
1612 
1613 /* Helper for instantiate_decls called via walk_tree: Process all decls
1614    in the given DECL_VALUE_EXPR.  */
1615 
1616 static tree
1617 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1618 {
1619   tree t = *tp;
1620   if (! EXPR_P (t))
1621     {
1622       *walk_subtrees = 0;
1623       if (DECL_P (t) && DECL_RTL_SET_P (t))
1624 	instantiate_decl (DECL_RTL (t));
1625     }
1626   return NULL;
1627 }
1628 
1629 /* Subroutine of instantiate_decls: Process all decls in the given
1630    BLOCK node and all its subblocks.  */
1631 
1632 static void
1633 instantiate_decls_1 (tree let)
1634 {
1635   tree t;
1636 
1637   for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1638     {
1639       if (DECL_RTL_SET_P (t))
1640 	instantiate_decl (DECL_RTL (t));
1641       if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1642 	{
1643 	  tree v = DECL_VALUE_EXPR (t);
1644 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1645 	}
1646     }
1647 
1648   /* Process all subblocks.  */
1649   for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1650     instantiate_decls_1 (t);
1651 }
1652 
1653 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1654    all virtual registers in their DECL_RTL's.  */
1655 
1656 static void
1657 instantiate_decls (tree fndecl)
1658 {
1659   tree decl;
1660 
1661   /* Process all parameters of the function.  */
1662   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1663     {
1664       instantiate_decl (DECL_RTL (decl));
1665       instantiate_decl (DECL_INCOMING_RTL (decl));
1666       if (DECL_HAS_VALUE_EXPR_P (decl))
1667 	{
1668 	  tree v = DECL_VALUE_EXPR (decl);
1669 	  walk_tree (&v, instantiate_expr, NULL, NULL);
1670 	}
1671     }
1672 
1673   /* Now process all variables defined in the function or its subblocks.  */
1674   instantiate_decls_1 (DECL_INITIAL (fndecl));
1675 }
1676 
1677 /* Pass through the INSNS of function FNDECL and convert virtual register
1678    references to hard register references.  */
1679 
1680 static unsigned int
1681 instantiate_virtual_regs (void)
1682 {
1683   rtx insn;
1684 
1685   /* Compute the offsets to use for this function.  */
1686   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1687   var_offset = STARTING_FRAME_OFFSET;
1688   dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1689   out_arg_offset = STACK_POINTER_OFFSET;
1690 #ifdef FRAME_POINTER_CFA_OFFSET
1691   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1692 #else
1693   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1694 #endif
1695 
1696   /* Initialize recognition, indicating that volatile is OK.  */
1697   init_recog ();
1698 
1699   /* Scan through all the insns, instantiating every virtual register still
1700      present.  */
1701   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1702     if (INSN_P (insn))
1703       {
1704 	/* These patterns in the instruction stream can never be recognized.
1705 	   Fortunately, they shouldn't contain virtual registers either.  */
1706 	if (GET_CODE (PATTERN (insn)) == USE
1707 	    || GET_CODE (PATTERN (insn)) == CLOBBER
1708 	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
1709 	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1710 	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1711 	  continue;
1712 
1713 	instantiate_virtual_regs_in_insn (insn);
1714 
1715 	if (INSN_DELETED_P (insn))
1716 	  continue;
1717 
1718 	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1719 
1720 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1721 	if (GET_CODE (insn) == CALL_INSN)
1722 	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1723 			instantiate_virtual_regs_in_rtx, NULL);
1724       }
1725 
1726   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1727   instantiate_decls (current_function_decl);
1728 
1729   /* Indicate that, from now on, assign_stack_local should use
1730      frame_pointer_rtx.  */
1731   virtuals_instantiated = 1;
1732   return 0;
1733 }
1734 
1735 struct tree_opt_pass pass_instantiate_virtual_regs =
1736 {
1737   "vregs",                              /* name */
1738   NULL,                                 /* gate */
1739   instantiate_virtual_regs,             /* execute */
1740   NULL,                                 /* sub */
1741   NULL,                                 /* next */
1742   0,                                    /* static_pass_number */
1743   0,                                    /* tv_id */
1744   0,                                    /* properties_required */
1745   0,                                    /* properties_provided */
1746   0,                                    /* properties_destroyed */
1747   0,                                    /* todo_flags_start */
1748   TODO_dump_func,                       /* todo_flags_finish */
1749   0                                     /* letter */
1750 };
1751 
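/* Illustrative sketch only (exact registers, offsets and signs are target
   dependent, so this is not output from any particular port): before the
   vregs pass a local variable reference might look like

       (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   and after instantiation the virtual register has been replaced by the
   hard frame pointer, with var_offset (STARTING_FRAME_OFFSET) folded into
   the displacement, e.g.

       (mem:SI (plus:SI (reg:SI frame-pointer) (const_int 8 + var_offset)))

   The remaining virtual registers map analogously onto the incoming-args,
   outgoing-args and dynamic areas, using the offsets computed at the top
   of instantiate_virtual_regs above.  */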
1752 
1753 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1754    This means a type for which function calls must pass an address to the
1755    function or get an address back from the function.
1756    EXP may be a type node or an expression (whose type is tested).  */
1757 
1758 int
1759 aggregate_value_p (tree exp, tree fntype)
1760 {
1761   int i, regno, nregs;
1762   rtx reg;
1763 
1764   tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1765 
1766   /* DECL node associated with FNTYPE when relevant, which we might need to
1767      check for by-invisible-reference returns, typically for CALL_EXPR input
1768      EXPressions.  */
1769   tree fndecl = NULL_TREE;
1770 
1771   if (fntype)
1772     switch (TREE_CODE (fntype))
1773       {
1774       case CALL_EXPR:
1775 	fndecl = get_callee_fndecl (fntype);
1776 	fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1777 	break;
1778       case FUNCTION_DECL:
1779 	fndecl = fntype;
1780 	fntype = TREE_TYPE (fndecl);
1781 	break;
1782       case FUNCTION_TYPE:
1783       case METHOD_TYPE:
1784         break;
1785       case IDENTIFIER_NODE:
1786 	fntype = 0;
1787 	break;
1788       default:
1789 	/* We don't expect other tree codes here.  */
1790 	gcc_unreachable ();
1791       }
1792 
1793   if (TREE_CODE (type) == VOID_TYPE)
1794     return 0;
1795 
1796   /* If the front end has decided that this needs to be passed by
1797      reference, do so.  */
1798   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1799       && DECL_BY_REFERENCE (exp))
1800     return 1;
1801 
1802   /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1803      called function RESULT_DECL, meaning the function returns in memory by
1804      invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
1805      on the function type, which used to be the way to request such a return
1806      mechanism but might now be causing troubles at gimplification time if
1807      temporaries with the function type need to be created.  */
1808   if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1809       && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1810     return 1;
1811 
1812   if (targetm.calls.return_in_memory (type, fntype))
1813     return 1;
1814   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1815      and thus can't be returned in registers.  */
1816   if (TREE_ADDRESSABLE (type))
1817     return 1;
1818   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1819     return 1;
1820   /* Make sure we have suitable call-clobbered regs to return
1821      the value in; if not, we must return it in memory.  */
1822   reg = hard_function_value (type, 0, fntype, 0);
1823 
1824   /* If we have something other than a REG (e.g. a PARALLEL), then assume
1825      it is OK.  */
1826   if (!REG_P (reg))
1827     return 0;
1828 
1829   regno = REGNO (reg);
1830   nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1831   for (i = 0; i < nregs; i++)
1832     if (! call_used_regs[regno + i])
1833       return 1;
1834   return 0;
1835 }
1836 
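/* Illustration (hypothetical target, not taken from the checks above): for

       struct big { char buf[64]; };
       struct big f (void);

   the 64-byte struct typically cannot be returned in call-clobbered
   registers, so aggregate_value_p returns 1 and callers pass a hidden
   address for the result (see assign_parms_augmented_arg_list below).
   A plain `int g (void);' normally returns 0 here; the final answer is
   up to targetm.calls.return_in_memory and the register checks above.  */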
1837 /* Return true if we should assign DECL a pseudo register; false if it
1838    should live on the local stack.  */
1839 
1840 bool
1841 use_register_for_decl (tree decl)
1842 {
1843   /* Honor volatile.  */
1844   if (TREE_SIDE_EFFECTS (decl))
1845     return false;
1846 
1847   /* Honor addressability.  */
1848   if (TREE_ADDRESSABLE (decl))
1849     return false;
1850 
1851   /* Only register-like things go in registers.  */
1852   if (DECL_MODE (decl) == BLKmode)
1853     return false;
1854 
1855   /* If -ffloat-store specified, don't put explicit float variables
1856      into registers.  */
1857   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1858      propagates values across these stores, and it probably shouldn't.  */
1859   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1860     return false;
1861 
1862   /* If we're not interested in tracking debugging information for
1863      this decl, then we can certainly put it in a register.  */
1864   if (DECL_IGNORED_P (decl))
1865     return true;
1866 
1867   return (optimize || DECL_REGISTER (decl));
1868 }
1869 
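/* Examples of the tests above (illustrative): `volatile int v;' has
   TREE_SIDE_EFFECTS set and stays on the stack, as does any local whose
   address is taken (TREE_ADDRESSABLE) or whose mode is BLKmode.  An
   ordinary `int i;' gets a pseudo register when optimizing, or when the
   user wrote `register int i;'.  */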
1870 /* Return true if TYPE should be passed by invisible reference.  */
1871 
1872 bool
1873 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1874 		   tree type, bool named_arg)
1875 {
1876   if (type)
1877     {
1878       /* If this type contains non-trivial constructors, then it is
1879 	 forbidden for the middle-end to create any new copies.  */
1880       if (TREE_ADDRESSABLE (type))
1881 	return true;
1882 
1883       /* GCC post 3.4 passes *all* variable sized types by reference.  */
1884       if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1885 	return true;
1886     }
1887 
1888   return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1889 }
1890 
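/* Example (front-end behaviour, sketched): a C++ class with a nontrivial
   copy constructor is marked TREE_ADDRESSABLE, so the middle end may not
   create copies of it and it is always passed by invisible reference.
   Types whose size is not an INTEGER_CST (variable-sized objects) are
   likewise forced by reference; everything else is left to the target
   hook targetm.calls.pass_by_reference.  */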
1891 /* Return true if TYPE, which is passed by reference, should be callee
1892    copied instead of caller copied.  */
1893 
1894 bool
1895 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1896 			 tree type, bool named_arg)
1897 {
1898   if (type && TREE_ADDRESSABLE (type))
1899     return false;
1900   return targetm.calls.callee_copies (ca, mode, type, named_arg);
1901 }
1902 
1903 /* Structures to communicate between the subroutines of assign_parms.
1904    The first holds data persistent across all parameters, the second
1905    is cleared out for each parameter.  */
1906 
1907 struct assign_parm_data_all
1908 {
1909   CUMULATIVE_ARGS args_so_far;
1910   struct args_size stack_args_size;
1911   tree function_result_decl;
1912   tree orig_fnargs;
1913   rtx conversion_insns;
1914   HOST_WIDE_INT pretend_args_size;
1915   HOST_WIDE_INT extra_pretend_bytes;
1916   int reg_parm_stack_space;
1917 };
1918 
1919 struct assign_parm_data_one
1920 {
1921   tree nominal_type;
1922   tree passed_type;
1923   rtx entry_parm;
1924   rtx stack_parm;
1925   enum machine_mode nominal_mode;
1926   enum machine_mode passed_mode;
1927   enum machine_mode promoted_mode;
1928   struct locate_and_pad_arg_data locate;
1929   int partial;
1930   BOOL_BITFIELD named_arg : 1;
1931   BOOL_BITFIELD passed_pointer : 1;
1932   BOOL_BITFIELD on_stack : 1;
1933   BOOL_BITFIELD loaded_in_reg : 1;
1934 };
1935 
1936 /* A subroutine of assign_parms.  Initialize ALL.  */
1937 
1938 static void
1939 assign_parms_initialize_all (struct assign_parm_data_all *all)
1940 {
1941   tree fntype;
1942 
1943   memset (all, 0, sizeof (*all));
1944 
1945   fntype = TREE_TYPE (current_function_decl);
1946 
1947 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1948   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1949 #else
1950   INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1951 			current_function_decl, -1);
1952 #endif
1953 
1954 #ifdef REG_PARM_STACK_SPACE
1955   all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1956 #endif
1957 }
1958 
1959 /* If ARGS contains entries with complex types, split the entry into two
1960    entries of the component type.  Return a new list if substitutions are
1961    needed, else the old list.  */
1962 
1963 static tree
1964 split_complex_args (tree args)
1965 {
1966   tree p;
1967 
1968   /* Before allocating memory, check for the common case of no complex.  */
1969   for (p = args; p; p = TREE_CHAIN (p))
1970     {
1971       tree type = TREE_TYPE (p);
1972       if (TREE_CODE (type) == COMPLEX_TYPE
1973 	  && targetm.calls.split_complex_arg (type))
1974         goto found;
1975     }
1976   return args;
1977 
1978  found:
1979   args = copy_list (args);
1980 
1981   for (p = args; p; p = TREE_CHAIN (p))
1982     {
1983       tree type = TREE_TYPE (p);
1984       if (TREE_CODE (type) == COMPLEX_TYPE
1985 	  && targetm.calls.split_complex_arg (type))
1986 	{
1987 	  tree decl;
1988 	  tree subtype = TREE_TYPE (type);
1989 	  bool addressable = TREE_ADDRESSABLE (p);
1990 
1991 	  /* Rewrite the PARM_DECL's type with its component.  */
1992 	  TREE_TYPE (p) = subtype;
1993 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1994 	  DECL_MODE (p) = VOIDmode;
1995 	  DECL_SIZE (p) = NULL;
1996 	  DECL_SIZE_UNIT (p) = NULL;
1997 	  /* If this arg must go in memory, put it in a pseudo here.
1998 	     We can't allow it to go in memory as per normal parms,
1999 	     because the usual place might not have the imag part
2000 	     adjacent to the real part.  */
2001 	  DECL_ARTIFICIAL (p) = addressable;
2002 	  DECL_IGNORED_P (p) = addressable;
2003 	  TREE_ADDRESSABLE (p) = 0;
2004 	  layout_decl (p, 0);
2005 
2006 	  /* Build a second synthetic decl.  */
2007 	  decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2008 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2009 	  DECL_ARTIFICIAL (decl) = addressable;
2010 	  DECL_IGNORED_P (decl) = addressable;
2011 	  layout_decl (decl, 0);
2012 
2013 	  /* Splice it in; skip the new decl.  */
2014 	  TREE_CHAIN (decl) = TREE_CHAIN (p);
2015 	  TREE_CHAIN (p) = decl;
2016 	  p = decl;
2017 	}
2018     }
2019 
2020   return args;
2021 }
2022 
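/* Sketch of the effect, assuming the target's split_complex_arg hook
   accepts the type: a parameter declared as

       void f (_Complex double z);

   ends up with two DFmode PARM_DECLs on the chain, the original decl
   rewritten to the component type plus a synthetic decl spliced in after
   it, roughly one for the real and one for the imaginary part.
   assign_parms_unsplit_complex below glues them back together into a
   CONCAT (or into a stack temporary when the parameter is addressable).  */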
2023 /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
2024    the hidden struct return argument, and (abi willing) complex args.
2025    Return the new parameter list.  */
2026 
2027 static tree
2028 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2029 {
2030   tree fndecl = current_function_decl;
2031   tree fntype = TREE_TYPE (fndecl);
2032   tree fnargs = DECL_ARGUMENTS (fndecl);
2033 
2034   /* If struct value address is treated as the first argument, make it so.  */
2035   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2036       && ! current_function_returns_pcc_struct
2037       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2038     {
2039       tree type = build_pointer_type (TREE_TYPE (fntype));
2040       tree decl;
2041 
2042       decl = build_decl (PARM_DECL, NULL_TREE, type);
2043       DECL_ARG_TYPE (decl) = type;
2044       DECL_ARTIFICIAL (decl) = 1;
2045       DECL_IGNORED_P (decl) = 1;
2046 
2047       TREE_CHAIN (decl) = fnargs;
2048       fnargs = decl;
2049       all->function_result_decl = decl;
2050     }
2051 
2052   all->orig_fnargs = fnargs;
2053 
2054   /* If the target wants to split complex arguments into scalars, do so.  */
2055   if (targetm.calls.split_complex_arg)
2056     fnargs = split_complex_args (fnargs);
2057 
2058   return fnargs;
2059 }
2060 
2061 /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2062    data for the parameter.  Incorporate ABI specifics such as pass-by-
2063    reference and type promotion.  */
2064 
2065 static void
2066 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2067 			     struct assign_parm_data_one *data)
2068 {
2069   tree nominal_type, passed_type;
2070   enum machine_mode nominal_mode, passed_mode, promoted_mode;
2071 
2072   memset (data, 0, sizeof (*data));
2073 
2074   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
2075   if (!current_function_stdarg)
2076     data->named_arg = 1;  /* No variadic parms.  */
2077   else if (TREE_CHAIN (parm))
2078     data->named_arg = 1;  /* Not the last non-variadic parm.  */
2079   else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2080     data->named_arg = 1;  /* Only variadic ones are unnamed.  */
2081   else
2082     data->named_arg = 0;  /* Treat as variadic.  */
2083 
2084   nominal_type = TREE_TYPE (parm);
2085   passed_type = DECL_ARG_TYPE (parm);
2086 
2087   /* Look out for errors propagating this far.  Also, if the parameter's
2088      type is void then its value doesn't matter.  */
2089   if (TREE_TYPE (parm) == error_mark_node
2090       /* This can happen after weird syntax errors
2091 	 or if an enum type is defined among the parms.  */
2092       || TREE_CODE (parm) != PARM_DECL
2093       || passed_type == NULL
2094       || VOID_TYPE_P (nominal_type))
2095     {
2096       nominal_type = passed_type = void_type_node;
2097       nominal_mode = passed_mode = promoted_mode = VOIDmode;
2098       goto egress;
2099     }
2100 
2101   /* Find mode of arg as it is passed, and mode of arg as it should be
2102      during execution of this function.  */
2103   passed_mode = TYPE_MODE (passed_type);
2104   nominal_mode = TYPE_MODE (nominal_type);
2105 
2106   /* If the parm is to be passed as a transparent union, use the type of
2107      the first field for the tests below.  We have already verified that
2108      the modes are the same.  */
2109   if (TREE_CODE (passed_type) == UNION_TYPE
2110       && TYPE_TRANSPARENT_UNION (passed_type))
2111     passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2112 
2113   /* See if this arg was passed by invisible reference.  */
2114   if (pass_by_reference (&all->args_so_far, passed_mode,
2115 			 passed_type, data->named_arg))
2116     {
2117       passed_type = nominal_type = build_pointer_type (passed_type);
2118       data->passed_pointer = true;
2119       passed_mode = nominal_mode = Pmode;
2120     }
2121 
2122   /* Find mode as it is passed by the ABI.  */
2123   promoted_mode = passed_mode;
2124   if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2125     {
2126       int unsignedp = TYPE_UNSIGNED (passed_type);
2127       promoted_mode = promote_mode (passed_type, promoted_mode,
2128 				    &unsignedp, 1);
2129     }
2130 
2131  egress:
2132   data->nominal_type = nominal_type;
2133   data->passed_type = passed_type;
2134   data->nominal_mode = nominal_mode;
2135   data->passed_mode = passed_mode;
2136   data->promoted_mode = promoted_mode;
2137 }
2138 
2139 /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2140 
2141 static void
2142 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2143 			    struct assign_parm_data_one *data, bool no_rtl)
2144 {
2145   int varargs_pretend_bytes = 0;
2146 
2147   targetm.calls.setup_incoming_varargs (&all->args_so_far,
2148 					data->promoted_mode,
2149 					data->passed_type,
2150 					&varargs_pretend_bytes, no_rtl);
2151 
2152   /* If the back-end has requested extra stack space, record how much is
2153      needed.  Do not change pretend_args_size otherwise since it may be
2154      nonzero from an earlier partial argument.  */
2155   if (varargs_pretend_bytes > 0)
2156     all->pretend_args_size = varargs_pretend_bytes;
2157 }
2158 
2159 /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2160    the incoming location of the current parameter.  */
2161 
2162 static void
2163 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2164 			    struct assign_parm_data_one *data)
2165 {
2166   HOST_WIDE_INT pretend_bytes = 0;
2167   rtx entry_parm;
2168   bool in_regs;
2169 
2170   if (data->promoted_mode == VOIDmode)
2171     {
2172       data->entry_parm = data->stack_parm = const0_rtx;
2173       return;
2174     }
2175 
2176 #ifdef FUNCTION_INCOMING_ARG
2177   entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2178 				      data->passed_type, data->named_arg);
2179 #else
2180   entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2181 			     data->passed_type, data->named_arg);
2182 #endif
2183 
2184   if (entry_parm == 0)
2185     data->promoted_mode = data->passed_mode;
2186 
2187   /* Determine parm's home in the stack, in case it arrives in the stack
2188      or we should pretend it did.  Compute the stack position and rtx where
2189      the argument arrives and its size.
2190 
2191      There is one complexity here:  If this was a parameter that would
2192      have been passed in registers, but wasn't only because it is
2193      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2194      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2195      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2196      as it was the previous time.  */
2197   in_regs = entry_parm != 0;
2198 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2199   in_regs = true;
2200 #endif
2201   if (!in_regs && !data->named_arg)
2202     {
2203       if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2204 	{
2205 	  rtx tem;
2206 #ifdef FUNCTION_INCOMING_ARG
2207 	  tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2208 				       data->passed_type, true);
2209 #else
2210 	  tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2211 			      data->passed_type, true);
2212 #endif
2213 	  in_regs = tem != NULL;
2214 	}
2215     }
2216 
2217   /* If this parameter was passed both in registers and in the stack, use
2218      the copy on the stack.  */
2219   if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2220 					data->passed_type))
2221     entry_parm = 0;
2222 
2223   if (entry_parm)
2224     {
2225       int partial;
2226 
2227       partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2228 						 data->promoted_mode,
2229 						 data->passed_type,
2230 						 data->named_arg);
2231       data->partial = partial;
2232 
2233       /* The caller might already have allocated stack space for the
2234 	 register parameters.  */
2235       if (partial != 0 && all->reg_parm_stack_space == 0)
2236 	{
2237 	  /* Part of this argument is passed in registers and part
2238 	     is passed on the stack.  Ask the prologue code to extend
2239 	     the stack part so that we can recreate the full value.
2240 
2241 	     PRETEND_BYTES is the size of the registers we need to store.
2242 	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2243 	     stack space that the prologue should allocate.
2244 
2245 	     Internally, gcc assumes that the argument pointer is aligned
2246 	     to STACK_BOUNDARY bits.  This is used both for alignment
2247 	     optimizations (see init_emit) and to locate arguments that are
2248 	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
2249 	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2250 	     a stack boundary.  */
2251 
2252 	  /* We assume at most one partial arg, and it must be the first
2253 	     argument on the stack.  */
2254 	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2255 
2256 	  pretend_bytes = partial;
2257 	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2258 
2259 	  /* We want to align relative to the actual stack pointer, so
2260 	     don't include this in the stack size until later.  */
2261 	  all->extra_pretend_bytes = all->pretend_args_size;
2262 	}
2263     }
2264 
2265   locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2266 		       entry_parm ? data->partial : 0, current_function_decl,
2267 		       &all->stack_args_size, &data->locate);
2268 
2269   /* Adjust offsets to include the pretend args.  */
2270   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2271   data->locate.slot_offset.constant += pretend_bytes;
2272   data->locate.offset.constant += pretend_bytes;
2273 
2274   data->entry_parm = entry_parm;
2275 }
2276 
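/* Worked example of the pretend-args rounding above (hypothetical target
   with STACK_BYTES == 16): if 4 bytes of an argument arrive in registers
   and the remainder on the stack, PARTIAL is 4, so pretend_args_size
   becomes CEIL_ROUND (4, 16) == 16.  The prologue allocates those 16
   extra bytes so the register part can be stored contiguously with the
   stack part, and the offsets computed by locate_and_pad_parm are then
   adjusted to account for that extra space.  */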
2277 /* A subroutine of assign_parms.  If there is actually space on the stack
2278    for this parm, count it in stack_args_size and return true.  */
2279 
2280 static bool
2281 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2282 			   struct assign_parm_data_one *data)
2283 {
2284   /* Trivially true if we've no incoming register.  */
2285   if (data->entry_parm == NULL)
2286     ;
2287   /* Also true if we're partially in registers and partially not,
2288      since we've arranged to drop the entire argument on the stack.  */
2289   else if (data->partial != 0)
2290     ;
2291   /* Also true if the target says that it's passed in both registers
2292      and on the stack.  */
2293   else if (GET_CODE (data->entry_parm) == PARALLEL
2294 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2295     ;
2296   /* Also true if the target says that there's stack allocated for
2297      all register parameters.  */
2298   else if (all->reg_parm_stack_space > 0)
2299     ;
2300   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2301   else
2302     return false;
2303 
2304   all->stack_args_size.constant += data->locate.size.constant;
2305   if (data->locate.size.var)
2306     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2307 
2308   return true;
2309 }
2310 
2311 /* A subroutine of assign_parms.  Given that this parameter is allocated
2312    stack space by the ABI, find it.  */
2313 
2314 static void
2315 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2316 {
2317   rtx offset_rtx, stack_parm;
2318   unsigned int align, boundary;
2319 
2320   /* If we're passing this arg using a reg, make its stack home the
2321      aligned stack slot.  */
2322   if (data->entry_parm)
2323     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2324   else
2325     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2326 
2327   stack_parm = current_function_internal_arg_pointer;
2328   if (offset_rtx != const0_rtx)
2329     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2330   stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2331 
2332   set_mem_attributes (stack_parm, parm, 1);
2333 
2334   boundary = data->locate.boundary;
2335   align = BITS_PER_UNIT;
2336 
2337   /* If we're padding upward, we know that the alignment of the slot
2338      is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2339      intentionally forcing upward padding.  Otherwise we have to come
2340      up with a guess at the alignment based on OFFSET_RTX.  */
2341   if (data->locate.where_pad != downward || data->entry_parm)
2342     align = boundary;
2343   else if (GET_CODE (offset_rtx) == CONST_INT)
2344     {
2345       align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2346       align = align & -align;
2347     }
2348   set_mem_align (stack_parm, align);
2349 
2350   if (data->entry_parm)
2351     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2352 
2353   data->stack_parm = stack_parm;
2354 }
2355 
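/* The guess above keeps the largest power of two that divides both the
   slot's bit offset and the declared boundary.  Illustrative numbers,
   assuming BITS_PER_UNIT == 8: with a byte offset of 4 and a boundary of
   64 bits, 4 * 8 == 32, 32 | 64 == 96, and 96 & -96 == 32, so the MEM is
   marked as (at least) 32-bit aligned.  */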
2356 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
2357    always valid and contiguous.  */
2358 
2359 static void
2360 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2361 {
2362   rtx entry_parm = data->entry_parm;
2363   rtx stack_parm = data->stack_parm;
2364 
2365   /* If this parm was passed part in regs and part in memory, pretend it
2366      arrived entirely in memory by pushing the register-part onto the stack.
2367      In the special case of a DImode or DFmode that is split, we could put
2368      it together in a pseudoreg directly, but for now that's not worth
2369      bothering with.  */
2370   if (data->partial != 0)
2371     {
2372       /* Handle calls that pass values in multiple non-contiguous
2373 	 locations.  The Irix 6 ABI has examples of this.  */
2374       if (GET_CODE (entry_parm) == PARALLEL)
2375 	emit_group_store (validize_mem (stack_parm), entry_parm,
2376 			  data->passed_type,
2377 			  int_size_in_bytes (data->passed_type));
2378       else
2379 	{
2380 	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
2381 	  move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2382 			       data->partial / UNITS_PER_WORD);
2383 	}
2384 
2385       entry_parm = stack_parm;
2386     }
2387 
2388   /* If we didn't decide this parm came in a register, by default it came
2389      on the stack.  */
2390   else if (entry_parm == NULL)
2391     entry_parm = stack_parm;
2392 
2393   /* When an argument is passed in multiple locations, we can't make use
2394      of this information, but we can save some copying if the whole argument
2395      is passed in a single register.  */
2396   else if (GET_CODE (entry_parm) == PARALLEL
2397 	   && data->nominal_mode != BLKmode
2398 	   && data->passed_mode != BLKmode)
2399     {
2400       size_t i, len = XVECLEN (entry_parm, 0);
2401 
2402       for (i = 0; i < len; i++)
2403 	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2404 	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2405 	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2406 		== data->passed_mode)
2407 	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2408 	  {
2409 	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2410 	    break;
2411 	  }
2412     }
2413 
2414   data->entry_parm = entry_parm;
2415 }
2416 
2417 /* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
2418    always valid and properly aligned.  */
2419 
2420 static void
2421 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2422 {
2423   rtx stack_parm = data->stack_parm;
2424 
2425   /* If we can't trust the parm stack slot to be aligned enough for its
2426      ultimate type, don't use that slot after entry.  We'll make another
2427      stack slot, if we need one.  */
2428   if (stack_parm
2429       && ((STRICT_ALIGNMENT
2430 	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2431 	  || (data->nominal_type
2432 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2433 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2434     stack_parm = NULL;
2435 
2436   /* If parm was passed in memory, and we need to convert it on entry,
2437      don't store it back in that same slot.  */
2438   else if (data->entry_parm == stack_parm
2439 	   && data->nominal_mode != BLKmode
2440 	   && data->nominal_mode != data->passed_mode)
2441     stack_parm = NULL;
2442 
2443   /* If stack protection is in effect for this function, don't leave any
2444      pointers in their passed stack slots.  */
2445   else if (cfun->stack_protect_guard
2446 	   && (flag_stack_protect == 2
2447 	       || data->passed_pointer
2448 	       || POINTER_TYPE_P (data->nominal_type)))
2449     stack_parm = NULL;
2450 
2451   data->stack_parm = stack_parm;
2452 }
2453 
2454 /* A subroutine of assign_parms.  Return true if the current parameter
2455    should be stored as a BLKmode in the current frame.  */
2456 
2457 static bool
2458 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2459 {
2460   if (data->nominal_mode == BLKmode)
2461     return true;
2462   if (GET_CODE (data->entry_parm) == PARALLEL)
2463     return true;
2464 
2465 #ifdef BLOCK_REG_PADDING
2466   /* Only assign_parm_setup_block knows how to deal with register arguments
2467      that are padded at the least significant end.  */
2468   if (REG_P (data->entry_parm)
2469       && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2470       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2471 	  == (BYTES_BIG_ENDIAN ? upward : downward)))
2472     return true;
2473 #endif
2474 
2475   return false;
2476 }
2477 
2478 /* A subroutine of assign_parms.  Arrange for the parameter to be
2479    present and valid in DATA->STACK_RTL.  */
2480 
2481 static void
2482 assign_parm_setup_block (struct assign_parm_data_all *all,
2483 			 tree parm, struct assign_parm_data_one *data)
2484 {
2485   rtx entry_parm = data->entry_parm;
2486   rtx stack_parm = data->stack_parm;
2487   HOST_WIDE_INT size;
2488   HOST_WIDE_INT size_stored;
2489   rtx orig_entry_parm = entry_parm;
2490 
2491   if (GET_CODE (entry_parm) == PARALLEL)
2492     entry_parm = emit_group_move_into_temps (entry_parm);
2493 
2494   /* If we've a non-block object that's nevertheless passed in parts,
2495      reconstitute it in register operations rather than on the stack.  */
2496   if (GET_CODE (entry_parm) == PARALLEL
2497       && data->nominal_mode != BLKmode)
2498     {
2499       rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2500 
2501       if ((XVECLEN (entry_parm, 0) > 1
2502 	   || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2503 	  && use_register_for_decl (parm))
2504 	{
2505 	  rtx parmreg = gen_reg_rtx (data->nominal_mode);
2506 
2507 	  push_to_sequence (all->conversion_insns);
2508 
2509 	  /* For values returned in multiple registers, handle possible
2510 	     incompatible calls to emit_group_store.
2511 
2512 	     For example, the following would be invalid, and would have to
2513 	     be fixed by the conditional below:
2514 
2515 	     emit_group_store ((reg:SF), (parallel:DF))
2516 	     emit_group_store ((reg:SI), (parallel:DI))
2517 
2518 	     An example of this is doubles in e500 v2:
2519 	     (parallel:DF (expr_list (reg:SI) (const_int 0))
2520 	     (expr_list (reg:SI) (const_int 4))).  */
2521 	  if (data->nominal_mode != data->passed_mode)
2522 	    {
2523 	      rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2524 	      emit_group_store (t, entry_parm, NULL_TREE,
2525 				GET_MODE_SIZE (GET_MODE (entry_parm)));
2526 	      convert_move (parmreg, t, 0);
2527 	    }
2528 	  else
2529 	    emit_group_store (parmreg, entry_parm, data->nominal_type,
2530 			      int_size_in_bytes (data->nominal_type));
2531 
2532 	  all->conversion_insns = get_insns ();
2533 	  end_sequence ();
2534 
2535 	  SET_DECL_RTL (parm, parmreg);
2536 	  return;
2537 	}
2538     }
2539 
2540   size = int_size_in_bytes (data->passed_type);
2541   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2542   if (stack_parm == 0)
2543     {
2544       DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2545       stack_parm = assign_stack_local (BLKmode, size_stored,
2546 				       DECL_ALIGN (parm));
2547       if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2548 	PUT_MODE (stack_parm, GET_MODE (entry_parm));
2549       set_mem_attributes (stack_parm, parm, 1);
2550     }
2551 
2552   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2553      calls that pass values in multiple non-contiguous locations.  */
2554   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2555     {
2556       rtx mem;
2557 
2558       /* Note that we will be storing an integral number of words.
2559 	 So we have to be careful to ensure that we allocate an
2560 	 integral number of words.  We do this above when we call
2561 	 assign_stack_local if space was not allocated in the argument
2562 	 list.  If it was, this will not work if PARM_BOUNDARY is not
2563 	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2564 	 if it becomes a problem.  Exception is when BLKmode arrives
2565 	 with arguments not conforming to word_mode.  */
2566 
2567       if (data->stack_parm == 0)
2568 	;
2569       else if (GET_CODE (entry_parm) == PARALLEL)
2570 	;
2571       else
2572 	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2573 
2574       mem = validize_mem (stack_parm);
2575 
2576       /* Handle values in multiple non-contiguous locations.  */
2577       if (GET_CODE (entry_parm) == PARALLEL)
2578 	{
2579 	  push_to_sequence (all->conversion_insns);
2580 	  emit_group_store (mem, entry_parm, data->passed_type, size);
2581 	  all->conversion_insns = get_insns ();
2582 	  end_sequence ();
2583 	}
2584 
2585       else if (size == 0)
2586 	;
2587 
2588       /* If SIZE is that of a mode no bigger than a word, just use
2589 	 that mode's store operation.  */
2590       else if (size <= UNITS_PER_WORD)
2591 	{
2592 	  enum machine_mode mode
2593 	    = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2594 
2595 	  if (mode != BLKmode
2596 #ifdef BLOCK_REG_PADDING
2597 	      && (size == UNITS_PER_WORD
2598 		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2599 		      != (BYTES_BIG_ENDIAN ? upward : downward)))
2600 #endif
2601 	      )
2602 	    {
2603 	      rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2604 	      emit_move_insn (change_address (mem, mode, 0), reg);
2605 	    }
2606 
2607 	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2608 	     machine must be aligned to the left before storing
2609 	     to memory.  Note that the previous test doesn't
2610 	     handle all cases (e.g. SIZE == 3).  */
2611 	  else if (size != UNITS_PER_WORD
2612 #ifdef BLOCK_REG_PADDING
2613 		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2614 		       == downward)
2615 #else
2616 		   && BYTES_BIG_ENDIAN
2617 #endif
2618 		   )
2619 	    {
2620 	      rtx tem, x;
2621 	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2622 	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2623 
2624 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2625 				build_int_cst (NULL_TREE, by),
2626 				NULL_RTX, 1);
2627 	      tem = change_address (mem, word_mode, 0);
2628 	      emit_move_insn (tem, x);
2629 	    }
2630 	  else
2631 	    move_block_from_reg (REGNO (entry_parm), mem,
2632 				 size_stored / UNITS_PER_WORD);
2633 	}
2634       else
2635 	move_block_from_reg (REGNO (entry_parm), mem,
2636 			     size_stored / UNITS_PER_WORD);
2637     }
2638   else if (data->stack_parm == 0)
2639     {
2640       push_to_sequence (all->conversion_insns);
2641       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2642 		       BLOCK_OP_NORMAL);
2643       all->conversion_insns = get_insns ();
2644       end_sequence ();
2645     }
2646 
2647   data->stack_parm = stack_parm;
2648   SET_DECL_RTL (parm, stack_parm);
2649 }
2650 
2651 /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
2652    parameter.  Get it there.  Perform all ABI specified conversions.  */
2653 
2654 static void
2655 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2656 		       struct assign_parm_data_one *data)
2657 {
2658   rtx parmreg;
2659   enum machine_mode promoted_nominal_mode;
2660   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2661   bool did_conversion = false;
2662 
2663   /* Store the parm in a pseudoregister during the function, but we may
2664      need to do it in a wider mode.  */
2665 
2666   /* This is not really promoting for a call.  However we need to be
2667      consistent with assign_parm_find_data_types and expand_expr_real_1.  */
2668   promoted_nominal_mode
2669     = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2670 
2671   parmreg = gen_reg_rtx (promoted_nominal_mode);
2672 
2673   if (!DECL_ARTIFICIAL (parm))
2674     mark_user_reg (parmreg);
2675 
2676   /* If this was an item that we received a pointer to,
2677      set DECL_RTL appropriately.  */
2678   if (data->passed_pointer)
2679     {
2680       rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2681       set_mem_attributes (x, parm, 1);
2682       SET_DECL_RTL (parm, x);
2683     }
2684   else
2685     SET_DECL_RTL (parm, parmreg);
2686 
2687   /* Copy the value into the register.  */
2688   if (data->nominal_mode != data->passed_mode
2689       || promoted_nominal_mode != data->promoted_mode)
2690     {
2691       int save_tree_used;
2692 
2693       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2694 	 mode, by the caller.  We now have to convert it to
2695 	 NOMINAL_MODE, if different.  However, PARMREG may be in
2696 	 a different mode than NOMINAL_MODE if it is being stored
2697 	 promoted.
2698 
2699 	 If ENTRY_PARM is a hard register, it might be in a register
2700 	 not valid for operating in its mode (e.g., an odd-numbered
2701 	 register for a DFmode).  In that case, moves are the only
2702 	 thing valid, so we can't do a convert from there.  This
2703 	 occurs when the calling sequence allows such misaligned
2704 	 usages.
2705 
2706 	 In addition, the conversion may involve a call, which could
2707 	 clobber parameters which haven't been copied to pseudo
2708 	 registers yet.  Therefore, we must first copy the parm to
2709 	 a pseudo reg here, and save the conversion until after all
2710 	 parameters have been moved.  */
2711 
2712       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2713 
2714       emit_move_insn (tempreg, validize_mem (data->entry_parm));
2715 
2716       push_to_sequence (all->conversion_insns);
2717       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2718 
2719       if (GET_CODE (tempreg) == SUBREG
2720 	  && GET_MODE (tempreg) == data->nominal_mode
2721 	  && REG_P (SUBREG_REG (tempreg))
2722 	  && data->nominal_mode == data->passed_mode
2723 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2724 	  && GET_MODE_SIZE (GET_MODE (tempreg))
2725 	     < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2726 	{
2727 	  /* The argument is already sign/zero extended, so note it
2728 	     into the subreg.  */
2729 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2730 	  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2731 	}
2732 
2733       /* TREE_USED gets set erroneously during expand_assignment.  */
2734       save_tree_used = TREE_USED (parm);
2735       expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2736       TREE_USED (parm) = save_tree_used;
2737       all->conversion_insns = get_insns ();
2738       end_sequence ();
2739 
2740       did_conversion = true;
2741     }
2742   else
2743     emit_move_insn (parmreg, validize_mem (data->entry_parm));
2744 
2745   /* If we were passed a pointer but the actual value can safely live
2746      in a register, put it in one.  */
2747   if (data->passed_pointer
2748       && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2749       /* If by-reference argument was promoted, demote it.  */
2750       && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2751 	  || use_register_for_decl (parm)))
2752     {
2753       /* We can't use nominal_mode, because it will have been set to
2754 	 Pmode above.  We must use the actual mode of the parm.  */
2755       parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2756       mark_user_reg (parmreg);
2757 
2758       if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2759 	{
2760 	  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2761 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2762 
2763 	  push_to_sequence (all->conversion_insns);
2764 	  emit_move_insn (tempreg, DECL_RTL (parm));
2765 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2766 	  emit_move_insn (parmreg, tempreg);
2767 	  all->conversion_insns = get_insns ();
2768 	  end_sequence ();
2769 
2770 	  did_conversion = true;
2771 	}
2772       else
2773 	emit_move_insn (parmreg, DECL_RTL (parm));
2774 
2775       SET_DECL_RTL (parm, parmreg);
2776 
2777       /* STACK_PARM is the pointer, not the parm, and PARMREG is
2778 	 now the parm.  */
2779       data->stack_parm = NULL;
2780     }
2781 
2782   /* Mark the register as eliminable if we did no conversion and it was
2783      copied from memory at a fixed offset, and the arg pointer was not
2784      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
2785      offset formed an invalid address, such memory-equivalences as we
2786      make here would screw up life analysis for it.  */
2787   if (data->nominal_mode == data->passed_mode
2788       && !did_conversion
2789       && data->stack_parm != 0
2790       && MEM_P (data->stack_parm)
2791       && data->locate.offset.var == 0
2792       && reg_mentioned_p (virtual_incoming_args_rtx,
2793 			  XEXP (data->stack_parm, 0)))
2794     {
2795       rtx linsn = get_last_insn ();
2796       rtx sinsn, set;
2797 
2798       /* Mark complex types separately.  */
2799       if (GET_CODE (parmreg) == CONCAT)
2800 	{
2801 	  enum machine_mode submode
2802 	    = GET_MODE_INNER (GET_MODE (parmreg));
2803 	  int regnor = REGNO (XEXP (parmreg, 0));
2804 	  int regnoi = REGNO (XEXP (parmreg, 1));
2805 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2806 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
2807 					  GET_MODE_SIZE (submode));
2808 
2809 	  /* Scan backwards for the set of the real and
2810 	     imaginary parts.  */
2811 	  for (sinsn = linsn; sinsn != 0;
2812 	       sinsn = prev_nonnote_insn (sinsn))
2813 	    {
2814 	      set = single_set (sinsn);
2815 	      if (set == 0)
2816 		continue;
2817 
2818 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
2819 		REG_NOTES (sinsn)
2820 		  = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2821 				       REG_NOTES (sinsn));
2822 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
2823 		REG_NOTES (sinsn)
2824 		  = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2825 				       REG_NOTES (sinsn));
2826 	    }
2827 	}
2828       else if ((set = single_set (linsn)) != 0
2829 	       && SET_DEST (set) == parmreg)
2830 	REG_NOTES (linsn)
2831 	  = gen_rtx_EXPR_LIST (REG_EQUIV,
2832 			       data->stack_parm, REG_NOTES (linsn));
2833     }
2834 
2835   /* For pointer data type, suggest pointer register.  */
2836   if (POINTER_TYPE_P (TREE_TYPE (parm)))
2837     mark_reg_pointer (parmreg,
2838 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2839 }
2840 
2841 /* A subroutine of assign_parms.  Allocate stack space to hold the current
2842    parameter.  Get it there.  Perform all ABI specified conversions.  */
2843 
2844 static void
2845 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2846 		         struct assign_parm_data_one *data)
2847 {
2848   /* Value must be stored in the stack slot STACK_PARM during function
2849      execution.  */
2850   bool to_conversion = false;
2851 
2852   if (data->promoted_mode != data->nominal_mode)
2853     {
2854       /* Conversion is required.  */
2855       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2856 
2857       emit_move_insn (tempreg, validize_mem (data->entry_parm));
2858 
2859       push_to_sequence (all->conversion_insns);
2860       to_conversion = true;
2861 
2862       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2863 					  TYPE_UNSIGNED (TREE_TYPE (parm)));
2864 
2865       if (data->stack_parm)
2866 	/* ??? This may need a big-endian conversion on sparc64.  */
2867 	data->stack_parm
2868 	  = adjust_address (data->stack_parm, data->nominal_mode, 0);
2869     }
2870 
2871   if (data->entry_parm != data->stack_parm)
2872     {
2873       rtx src, dest;
2874 
2875       if (data->stack_parm == 0)
2876 	{
2877 	  data->stack_parm
2878 	    = assign_stack_local (GET_MODE (data->entry_parm),
2879 				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2880 				  TYPE_ALIGN (data->passed_type));
2881 	  set_mem_attributes (data->stack_parm, parm, 1);
2882 	}
2883 
2884       dest = validize_mem (data->stack_parm);
2885       src = validize_mem (data->entry_parm);
2886 
2887       if (MEM_P (src))
2888 	{
2889 	  /* Use a block move to handle potentially misaligned entry_parm.  */
2890 	  if (!to_conversion)
2891 	    push_to_sequence (all->conversion_insns);
2892 	  to_conversion = true;
2893 
2894 	  emit_block_move (dest, src,
2895 			   GEN_INT (int_size_in_bytes (data->passed_type)),
2896 			   BLOCK_OP_NORMAL);
2897 	}
2898       else
2899 	emit_move_insn (dest, src);
2900     }
2901 
2902   if (to_conversion)
2903     {
2904       all->conversion_insns = get_insns ();
2905       end_sequence ();
2906     }
2907 
2908   SET_DECL_RTL (parm, data->stack_parm);
2909 }
2910 
2911 /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
2912    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
2913 
2914 static void
2915 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2916 {
2917   tree parm;
2918   tree orig_fnargs = all->orig_fnargs;
2919 
2920   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2921     {
2922       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2923 	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2924 	{
2925 	  rtx tmp, real, imag;
2926 	  enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2927 
2928 	  real = DECL_RTL (fnargs);
2929 	  imag = DECL_RTL (TREE_CHAIN (fnargs));
2930 	  if (inner != GET_MODE (real))
2931 	    {
2932 	      real = gen_lowpart_SUBREG (inner, real);
2933 	      imag = gen_lowpart_SUBREG (inner, imag);
2934 	    }
2935 
2936 	  if (TREE_ADDRESSABLE (parm))
2937 	    {
2938 	      rtx rmem, imem;
2939 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2940 
2941 	      /* split_complex_arg put the real and imag parts in
2942 		 pseudos.  Move them to memory.  */
2943 	      tmp = assign_stack_local (DECL_MODE (parm), size,
2944 					TYPE_ALIGN (TREE_TYPE (parm)));
2945 	      set_mem_attributes (tmp, parm, 1);
2946 	      rmem = adjust_address_nv (tmp, inner, 0);
2947 	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2948 	      push_to_sequence (all->conversion_insns);
2949 	      emit_move_insn (rmem, real);
2950 	      emit_move_insn (imem, imag);
2951 	      all->conversion_insns = get_insns ();
2952 	      end_sequence ();
2953 	    }
2954 	  else
2955 	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2956 	  SET_DECL_RTL (parm, tmp);
2957 
2958 	  real = DECL_INCOMING_RTL (fnargs);
2959 	  imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2960 	  if (inner != GET_MODE (real))
2961 	    {
2962 	      real = gen_lowpart_SUBREG (inner, real);
2963 	      imag = gen_lowpart_SUBREG (inner, imag);
2964 	    }
2965 	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2966 	  set_decl_incoming_rtl (parm, tmp);
2967 	  fnargs = TREE_CHAIN (fnargs);
2968 	}
2969       else
2970 	{
2971 	  SET_DECL_RTL (parm, DECL_RTL (fnargs));
2972 	  set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2973 
2974 	  /* Set MEM_EXPR to the original decl, i.e. to PARM,
2975 	     instead of the copy of decl, i.e. FNARGS.  */
2976 	  if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2977 	    set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2978 	}
2979 
2980       fnargs = TREE_CHAIN (fnargs);
2981     }
2982 }
2983 
2984 /* Assign RTL expressions to the function's parameters.  This may involve
2985    copying them into registers and using those registers as the DECL_RTL.  */
2986 
2987 static void
2988 assign_parms (tree fndecl)
2989 {
2990   struct assign_parm_data_all all;
2991   tree fnargs, parm;
2992 
2993   current_function_internal_arg_pointer
2994     = targetm.calls.internal_arg_pointer ();
2995 
2996   assign_parms_initialize_all (&all);
2997   fnargs = assign_parms_augmented_arg_list (&all);
2998 
2999   for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3000     {
3001       struct assign_parm_data_one data;
3002 
3003       /* Extract the type of PARM; adjust it according to ABI.  */
3004       assign_parm_find_data_types (&all, parm, &data);
3005 
3006       /* Early out for errors and void parameters.  */
3007       if (data.passed_mode == VOIDmode)
3008 	{
3009 	  SET_DECL_RTL (parm, const0_rtx);
3010 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3011 	  continue;
3012 	}
3013 
3014       if (current_function_stdarg && !TREE_CHAIN (parm))
3015 	assign_parms_setup_varargs (&all, &data, false);
3016 
3017       /* Find out where the parameter arrives in this function.  */
3018       assign_parm_find_entry_rtl (&all, &data);
3019 
3020       /* Find out where stack space for this parameter might be.  */
3021       if (assign_parm_is_stack_parm (&all, &data))
3022 	{
3023 	  assign_parm_find_stack_rtl (parm, &data);
3024 	  assign_parm_adjust_entry_rtl (&data);
3025 	}
3026 
3027       /* Record permanently how this parm was passed.  */
3028       set_decl_incoming_rtl (parm, data.entry_parm);
3029 
3030       /* Update info on where next arg arrives in registers.  */
3031       FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3032 			    data.passed_type, data.named_arg);
3033 
3034       assign_parm_adjust_stack_rtl (&data);
3035 
3036       if (assign_parm_setup_block_p (&data))
3037 	assign_parm_setup_block (&all, parm, &data);
3038       else if (data.passed_pointer || use_register_for_decl (parm))
3039 	assign_parm_setup_reg (&all, parm, &data);
3040       else
3041 	assign_parm_setup_stack (&all, parm, &data);
3042     }
3043 
3044   if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3045     assign_parms_unsplit_complex (&all, fnargs);
3046 
3047   /* Output all parameter conversion instructions (possibly including calls)
3048      now that all parameters have been copied out of hard registers.  */
3049   emit_insn (all.conversion_insns);
3050 
3051   /* If we are receiving a struct value address as the first argument, set up
3052      the RTL for the function result. As this might require code to convert
3053      the transmitted address to Pmode, we do this here to ensure that possible
3054      preliminary conversions of the address have been emitted already.  */
3055   if (all.function_result_decl)
3056     {
3057       tree result = DECL_RESULT (current_function_decl);
3058       rtx addr = DECL_RTL (all.function_result_decl);
3059       rtx x;
3060 
3061       if (DECL_BY_REFERENCE (result))
3062 	x = addr;
3063       else
3064 	{
3065 	  addr = convert_memory_address (Pmode, addr);
3066 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
3067 	  set_mem_attributes (x, result, 1);
3068 	}
3069       SET_DECL_RTL (result, x);
3070     }
3071 
3072   /* We have aligned all the args, so add space for the pretend args.  */
3073   current_function_pretend_args_size = all.pretend_args_size;
3074   all.stack_args_size.constant += all.extra_pretend_bytes;
3075   current_function_args_size = all.stack_args_size.constant;
3076 
3077   /* Adjust function incoming argument size for alignment and
3078      minimum length.  */
3079 
3080 #ifdef REG_PARM_STACK_SPACE
3081   current_function_args_size = MAX (current_function_args_size,
3082 				    REG_PARM_STACK_SPACE (fndecl));
3083 #endif
3084 
3085   current_function_args_size = CEIL_ROUND (current_function_args_size,
3086 					   PARM_BOUNDARY / BITS_PER_UNIT);
3087 
3088 #ifdef ARGS_GROW_DOWNWARD
3089   current_function_arg_offset_rtx
3090     = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3091        : expand_expr (size_diffop (all.stack_args_size.var,
3092 				   size_int (-all.stack_args_size.constant)),
3093 		      NULL_RTX, VOIDmode, 0));
3094 #else
3095   current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3096 #endif
3097 
3098   /* See how many bytes, if any, of its args a function should try to pop
3099      on return.  */
3100 
3101   current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3102 						 current_function_args_size);
3103 
3104   /* For a stdarg.h function, save info about
3105      regs and stack space used by the named args.  */
3106 
3107   current_function_args_info = all.args_so_far;
3108 
3109   /* Set the rtx used for the function return value.  Put this in its
3110      own variable so any optimizers that need this information don't have
3111      to include tree.h.  Do this here so it gets done when an inlined
3112      function gets output.  */
3113 
3114   current_function_return_rtx
3115     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3116        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3117 
3118   /* If scalar return value was computed in a pseudo-reg, or was a named
3119      return value that got dumped to the stack, copy that to the hard
3120      return register.  */
3121   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3122     {
3123       tree decl_result = DECL_RESULT (fndecl);
3124       rtx decl_rtl = DECL_RTL (decl_result);
3125 
3126       if (REG_P (decl_rtl)
3127 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3128 	  : DECL_REGISTER (decl_result))
3129 	{
3130 	  rtx real_decl_rtl;
3131 
3132 	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3133 							fndecl, true);
3134 	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3135 	  /* The delay slot scheduler assumes that current_function_return_rtx
3136 	     holds the hard register containing the return value, not a
3137 	     temporary pseudo.  */
3138 	  current_function_return_rtx = real_decl_rtl;
3139 	}
3140     }
3141 }
3142 
3143 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3144    For all seen types, gimplify their sizes.  */
3145 
3146 static tree
3147 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3148 {
3149   tree t = *tp;
3150 
3151   *walk_subtrees = 0;
3152   if (TYPE_P (t))
3153     {
3154       if (POINTER_TYPE_P (t))
3155 	*walk_subtrees = 1;
3156       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3157 	       && !TYPE_SIZES_GIMPLIFIED (t))
3158 	{
3159 	  gimplify_type_sizes (t, (tree *) data);
3160 	  *walk_subtrees = 1;
3161 	}
3162     }
3163 
3164   return NULL;
3165 }
3166 
3167 /* Gimplify the parameter list for current_function_decl.  This involves
3168    evaluating SAVE_EXPRs of variable sized parameters and generating code
3169    to implement callee-copies reference parameters.  Returns a list of
3170    statements to add to the beginning of the function, or NULL if nothing
3171    to do.  */
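/* As a rough sketch (assuming an ABI where such arguments are passed by
   reference with callee-made copies), a variable-sized parameter P is
   rewritten into something like

       addr = (T *) __builtin_alloca (DECL_SIZE_UNIT (P));
       *addr = P;

   where T is P's type, and P's DECL_VALUE_EXPR is then set to *addr so the
   function body operates on the local copy.  Whether this applies at all is
   decided by the target's reference_callee_copied hook.  */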
3172 
3173 tree
3174 gimplify_parameters (void)
3175 {
3176   struct assign_parm_data_all all;
3177   tree fnargs, parm, stmts = NULL;
3178 
3179   assign_parms_initialize_all (&all);
3180   fnargs = assign_parms_augmented_arg_list (&all);
3181 
3182   for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3183     {
3184       struct assign_parm_data_one data;
3185 
3186       /* Extract the type of PARM; adjust it according to ABI.  */
3187       assign_parm_find_data_types (&all, parm, &data);
3188 
3189       /* Early out for errors and void parameters.  */
3190       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3191 	continue;
3192 
3193       /* Update info on where next arg arrives in registers.  */
3194       FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3195 			    data.passed_type, data.named_arg);
3196 
3197       /* ??? Once upon a time variable_size stuffed parameter list
3198 	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
3199 	 turned out to be less than manageable in the gimple world.
3200 	 Now we have to hunt them down ourselves.  */
3201       walk_tree_without_duplicates (&data.passed_type,
3202 				    gimplify_parm_type, &stmts);
3203 
3204       if (!TREE_CONSTANT (DECL_SIZE (parm)))
3205 	{
3206 	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3207 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3208 	}
3209 
3210       if (data.passed_pointer)
3211 	{
3212           tree type = TREE_TYPE (data.passed_type);
3213 	  if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3214 				       type, data.named_arg))
3215 	    {
3216 	      tree local, t;
3217 
3218 	      /* For constant sized objects, this is trivial; for
3219 		 variable-sized objects, we have to play games.  */
3220 	      if (TREE_CONSTANT (DECL_SIZE (parm)))
3221 		{
3222 		  local = create_tmp_var (type, get_name (parm));
3223 		  DECL_IGNORED_P (local) = 0;
3224 		}
3225 	      else
3226 		{
3227 		  tree ptr_type, addr, args;
3228 
3229 		  ptr_type = build_pointer_type (type);
3230 		  addr = create_tmp_var (ptr_type, get_name (parm));
3231 		  DECL_IGNORED_P (addr) = 0;
3232 		  local = build_fold_indirect_ref (addr);
3233 
3234 		  args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3235 		  t = built_in_decls[BUILT_IN_ALLOCA];
3236 		  t = build_function_call_expr (t, args);
3237 		  t = fold_convert (ptr_type, t);
3238 		  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3239 		  gimplify_and_add (t, &stmts);
3240 		}
3241 
3242 	      t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3243 	      gimplify_and_add (t, &stmts);
3244 
3245 	      SET_DECL_VALUE_EXPR (parm, local);
3246 	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
3247 	    }
3248 	}
3249     }
3250 
3251   return stmts;
3252 }
3253 
3254 /* Indicate whether REGNO is an incoming argument to the current function
3255    that was promoted to a wider mode.  If so, return the RTX for the
3256    register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
3257    that REGNO is promoted from and whether the promotion was signed or
3258    unsigned.  */
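/* For example, on a target whose PROMOTE_MODE widens QImode arguments to
   SImode, a `char' parameter arriving in hard register N makes
   promoted_input_arg (N, &mode, &unsignedp) return the incoming SImode
   register, with *PMODE set to QImode and *PUNSIGNEDP to the signedness
   used for the promotion.  */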
3259 
3260 rtx
3261 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3262 {
3263   tree arg;
3264 
3265   for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3266        arg = TREE_CHAIN (arg))
3267     if (REG_P (DECL_INCOMING_RTL (arg))
3268 	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
3269 	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3270       {
3271 	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3272 	int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3273 
3274 	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3275 	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3276 	    && mode != DECL_MODE (arg))
3277 	  {
3278 	    *pmode = DECL_MODE (arg);
3279 	    *punsignedp = unsignedp;
3280 	    return DECL_INCOMING_RTL (arg);
3281 	  }
3282       }
3283 
3284   return 0;
3285 }
3286 
3287 
3288 /* Compute the size and offset from the start of the stacked arguments for a
3289    parm passed in mode PASSED_MODE and with type TYPE.
3290 
3291    INITIAL_OFFSET_PTR points to the current offset into the stacked
3292    arguments.
3293 
3294    The starting offset and size for this parm are returned in
3295    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
3296    nonzero, the offset is that of the stack slot, which is returned in
3297    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
3298    padding required from the initial offset ptr to the stack slot.
3299 
3300    IN_REGS is nonzero if the argument will be passed in registers.  It will
3301    never be set if REG_PARM_STACK_SPACE is not defined.
3302 
3303    FNDECL is the function in which the argument was defined.
3304 
3305    There are two types of rounding that are done.  The first, controlled by
3306    FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3307    list to be aligned to the specific boundary (in bits).  This rounding
3308    affects the initial and starting offsets, but not the argument size.
3309 
3310    The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3311    optionally rounds the size of the parm to PARM_BOUNDARY.  The
3312    initial offset is not affected by this rounding, while the size always
3313    is and the starting offset may be.  */
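/* A worked example with purely illustrative numbers: if
   FUNCTION_ARG_BOUNDARY is 64 bits, an incoming offset of 4 bytes is first
   padded up to 8; if PARM_BOUNDARY is 32 bits and the padding direction is
   upward, a 6-byte argument then has its size rounded to 8 bytes, so the
   following argument starts at offset 16.  */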
3314 
3315 /*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3316     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3317     callers pass in the total size of args so far as
3318     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
3319 
3320 void
3321 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3322 		     int partial, tree fndecl ATTRIBUTE_UNUSED,
3323 		     struct args_size *initial_offset_ptr,
3324 		     struct locate_and_pad_arg_data *locate)
3325 {
3326   tree sizetree;
3327   enum direction where_pad;
3328   unsigned int boundary;
3329   int reg_parm_stack_space = 0;
3330   int part_size_in_regs;
3331 
3332 #ifdef REG_PARM_STACK_SPACE
3333   reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3334 
3335   /* If we have found a stack parm before we reach the end of the
3336      area reserved for registers, skip that area.  */
3337   if (! in_regs)
3338     {
3339       if (reg_parm_stack_space > 0)
3340 	{
3341 	  if (initial_offset_ptr->var)
3342 	    {
3343 	      initial_offset_ptr->var
3344 		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3345 			      ssize_int (reg_parm_stack_space));
3346 	      initial_offset_ptr->constant = 0;
3347 	    }
3348 	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
3349 	    initial_offset_ptr->constant = reg_parm_stack_space;
3350 	}
3351     }
3352 #endif /* REG_PARM_STACK_SPACE */
3353 
3354   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3355 
3356   sizetree
3357     = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3358   where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3359   boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3360   locate->where_pad = where_pad;
3361   locate->boundary = boundary;
3362 
3363   /* Remember if the outgoing parameter requires extra alignment on the
3364      calling function side.  */
3365   if (boundary > PREFERRED_STACK_BOUNDARY)
3366     boundary = PREFERRED_STACK_BOUNDARY;
3367   if (cfun->stack_alignment_needed < boundary)
3368     cfun->stack_alignment_needed = boundary;
3369 
3370 #ifdef ARGS_GROW_DOWNWARD
3371   locate->slot_offset.constant = -initial_offset_ptr->constant;
3372   if (initial_offset_ptr->var)
3373     locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3374 					  initial_offset_ptr->var);
3375 
3376   {
3377     tree s2 = sizetree;
3378     if (where_pad != none
3379 	&& (!host_integerp (sizetree, 1)
3380 	    || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3381       s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3382     SUB_PARM_SIZE (locate->slot_offset, s2);
3383   }
3384 
3385   locate->slot_offset.constant += part_size_in_regs;
3386 
3387   if (!in_regs
3388 #ifdef REG_PARM_STACK_SPACE
3389       || REG_PARM_STACK_SPACE (fndecl) > 0
3390 #endif
3391      )
3392     pad_to_arg_alignment (&locate->slot_offset, boundary,
3393 			  &locate->alignment_pad);
3394 
3395   locate->size.constant = (-initial_offset_ptr->constant
3396 			   - locate->slot_offset.constant);
3397   if (initial_offset_ptr->var)
3398     locate->size.var = size_binop (MINUS_EXPR,
3399 				   size_binop (MINUS_EXPR,
3400 					       ssize_int (0),
3401 					       initial_offset_ptr->var),
3402 				   locate->slot_offset.var);
3403 
3404   /* Pad_below needs the pre-rounded size to know how much to pad
3405      below.  */
3406   locate->offset = locate->slot_offset;
3407   if (where_pad == downward)
3408     pad_below (&locate->offset, passed_mode, sizetree);
3409 
3410 #else /* !ARGS_GROW_DOWNWARD */
3411   if (!in_regs
3412 #ifdef REG_PARM_STACK_SPACE
3413       || REG_PARM_STACK_SPACE (fndecl) > 0
3414 #endif
3415       )
3416     pad_to_arg_alignment (initial_offset_ptr, boundary,
3417 			  &locate->alignment_pad);
3418   locate->slot_offset = *initial_offset_ptr;
3419 
3420 #ifdef PUSH_ROUNDING
3421   if (passed_mode != BLKmode)
3422     sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3423 #endif
3424 
3425   /* Pad_below needs the pre-rounded size to know how much to pad below
3426      so this must be done before rounding up.  */
3427   locate->offset = locate->slot_offset;
3428   if (where_pad == downward)
3429     pad_below (&locate->offset, passed_mode, sizetree);
3430 
3431   if (where_pad != none
3432       && (!host_integerp (sizetree, 1)
3433 	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3434     sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3435 
3436   ADD_PARM_SIZE (locate->size, sizetree);
3437 
3438   locate->size.constant -= part_size_in_regs;
3439 #endif /* ARGS_GROW_DOWNWARD */
3440 }
3441 
3442 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3443    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
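/* For instance, with BOUNDARY of 64 bits (8 bytes) and a constant offset
   of 20, CEIL_ROUND (20, 8) gives 24 when arguments grow upward; with
   ARGS_GROW_DOWNWARD and an offset of -20, FLOOR_ROUND (-20, 8) gives -24.
   A nonzero STACK_POINTER_OFFSET is added before the rounding and
   subtracted out again afterwards.  */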
3444 
3445 static void
3446 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3447 		      struct args_size *alignment_pad)
3448 {
3449   tree save_var = NULL_TREE;
3450   HOST_WIDE_INT save_constant = 0;
3451   int boundary_in_bytes = boundary / BITS_PER_UNIT;
3452   HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3453 
3454 #ifdef SPARC_STACK_BOUNDARY_HACK
3455   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3456      the real alignment of %sp.  However, when it does this, the
3457      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
3458   if (SPARC_STACK_BOUNDARY_HACK)
3459     sp_offset = 0;
3460 #endif
3461 
3462   if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3463     {
3464       save_var = offset_ptr->var;
3465       save_constant = offset_ptr->constant;
3466     }
3467 
3468   alignment_pad->var = NULL_TREE;
3469   alignment_pad->constant = 0;
3470 
3471   if (boundary > BITS_PER_UNIT)
3472     {
3473       if (offset_ptr->var)
3474 	{
3475 	  tree sp_offset_tree = ssize_int (sp_offset);
3476 	  tree offset = size_binop (PLUS_EXPR,
3477 				    ARGS_SIZE_TREE (*offset_ptr),
3478 				    sp_offset_tree);
3479 #ifdef ARGS_GROW_DOWNWARD
3480 	  tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3481 #else
3482 	  tree rounded = round_up   (offset, boundary / BITS_PER_UNIT);
3483 #endif
3484 
3485 	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3486 	  /* ARGS_SIZE_TREE includes constant term.  */
3487 	  offset_ptr->constant = 0;
3488 	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3489 	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3490 					     save_var);
3491 	}
3492       else
3493 	{
3494 	  offset_ptr->constant = -sp_offset +
3495 #ifdef ARGS_GROW_DOWNWARD
3496 	    FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3497 #else
3498 	    CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3499 #endif
3500 	    if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3501 	      alignment_pad->constant = offset_ptr->constant - save_constant;
3502 	}
3503     }
3504 }
3505 
3506 static void
3507 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3508 {
3509   if (passed_mode != BLKmode)
3510     {
3511       if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3512 	offset_ptr->constant
3513 	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3514 	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3515 	      - GET_MODE_SIZE (passed_mode));
3516     }
3517   else
3518     {
3519       if (TREE_CODE (sizetree) != INTEGER_CST
3520 	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3521 	{
3522 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
3523 	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3524 	  /* Add it in.  */
3525 	  ADD_PARM_SIZE (*offset_ptr, s2);
3526 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
3527 	}
3528     }
3529 }
3530 
3531 /* Walk the tree of blocks describing the binding levels within a function
3532    and warn about variables that might be killed by setjmp or vfork.
3533    This is done after calling flow_analysis and before global_alloc
3534    clobbers the pseudo-regs to hard regs.  */
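/* A typical case looks like

       if (setjmp (env) == 0)
	 i = compute ();
       else
	 use (i);

   where `i' lives in a pseudo that may be allocated to a hard register not
   preserved across the longjmp, hence the warning.  */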
3535 
3536 void
3537 setjmp_vars_warning (tree block)
3538 {
3539   tree decl, sub;
3540 
3541   for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3542     {
3543       if (TREE_CODE (decl) == VAR_DECL
3544 	  && DECL_RTL_SET_P (decl)
3545 	  && REG_P (DECL_RTL (decl))
3546 	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3547 	warning (0, "variable %q+D might be clobbered by %<longjmp%>"
3548 		 " or %<vfork%>",
3549 		 decl);
3550     }
3551 
3552   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3553     setjmp_vars_warning (sub);
3554 }
3555 
3556 /* Do the appropriate part of setjmp_vars_warning
3557    but for arguments instead of local variables.  */
3558 
3559 void
3560 setjmp_args_warning (void)
3561 {
3562   tree decl;
3563   for (decl = DECL_ARGUMENTS (current_function_decl);
3564        decl; decl = TREE_CHAIN (decl))
3565     if (DECL_RTL (decl) != 0
3566 	&& REG_P (DECL_RTL (decl))
3567 	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3568       warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3569 	       decl);
3570 }
3571 
3572 
3573 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3574    and create duplicate blocks.  */
3575 /* ??? Need an option to either create block fragments or to create
3576    abstract origin duplicates of a source block.  It really depends
3577    on what optimization has been performed.  */
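/* For example, if basic-block reordering moves the cold part of a lexical
   scope past unrelated code, that scope's BLOCK is entered and exited
   twice; the second NOTE_INSN_BLOCK_BEG then receives a copy of the BLOCK,
   chained through BLOCK_FRAGMENT_CHAIN, so that debug output can describe
   both address ranges.  */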
3578 
3579 void
3580 reorder_blocks (void)
3581 {
3582   tree block = DECL_INITIAL (current_function_decl);
3583   VEC(tree,heap) *block_stack;
3584 
3585   if (block == NULL_TREE)
3586     return;
3587 
3588   block_stack = VEC_alloc (tree, heap, 10);
3589 
3590   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
3591   clear_block_marks (block);
3592 
3593   /* Prune the old trees away, so that they don't get in the way.  */
3594   BLOCK_SUBBLOCKS (block) = NULL_TREE;
3595   BLOCK_CHAIN (block) = NULL_TREE;
3596 
3597   /* Recreate the block tree from the note nesting.  */
3598   reorder_blocks_1 (get_insns (), block, &block_stack);
3599   BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3600 
3601   VEC_free (tree, heap, block_stack);
3602 }
3603 
3604 /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
3605 
3606 void
3607 clear_block_marks (tree block)
3608 {
3609   while (block)
3610     {
3611       TREE_ASM_WRITTEN (block) = 0;
3612       clear_block_marks (BLOCK_SUBBLOCKS (block));
3613       block = BLOCK_CHAIN (block);
3614     }
3615 }
3616 
3617 static void
3618 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3619 {
3620   rtx insn;
3621 
3622   for (insn = insns; insn; insn = NEXT_INSN (insn))
3623     {
3624       if (NOTE_P (insn))
3625 	{
3626 	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3627 	    {
3628 	      tree block = NOTE_BLOCK (insn);
3629 	      tree origin;
3630 
3631 	      origin = (BLOCK_FRAGMENT_ORIGIN (block)
3632 			? BLOCK_FRAGMENT_ORIGIN (block)
3633 			: block);
3634 
3635 	      /* If we have seen this block before, that means it now
3636 		 spans multiple address regions.  Create a new fragment.  */
3637 	      if (TREE_ASM_WRITTEN (block))
3638 		{
3639 		  tree new_block = copy_node (block);
3640 
3641 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3642 		  BLOCK_FRAGMENT_CHAIN (new_block)
3643 		    = BLOCK_FRAGMENT_CHAIN (origin);
3644 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3645 
3646 		  NOTE_BLOCK (insn) = new_block;
3647 		  block = new_block;
3648 		}
3649 
3650 	      BLOCK_SUBBLOCKS (block) = 0;
3651 	      TREE_ASM_WRITTEN (block) = 1;
3652 	      /* When there's only one block for the entire function,
3653 		 current_block == block and we mustn't do this, it
3654 		 will cause infinite recursion.  */
3655 	      if (block != current_block)
3656 		{
3657 		  if (block != origin)
3658 		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3659 
3660 		  BLOCK_SUPERCONTEXT (block) = current_block;
3661 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3662 		  BLOCK_SUBBLOCKS (current_block) = block;
3663 		  current_block = origin;
3664 		}
3665 	      VEC_safe_push (tree, heap, *p_block_stack, block);
3666 	    }
3667 	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3668 	    {
3669 	      NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3670 	      BLOCK_SUBBLOCKS (current_block)
3671 		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3672 	      current_block = BLOCK_SUPERCONTEXT (current_block);
3673 	    }
3674 	}
3675     }
3676 }
3677 
3678 /* Reverse the order of elements in the chain T of blocks,
3679    and return the new head of the chain (old last element).  */
3680 
3681 tree
3682 blocks_nreverse (tree t)
3683 {
3684   tree prev = 0, decl, next;
3685   for (decl = t; decl; decl = next)
3686     {
3687       next = BLOCK_CHAIN (decl);
3688       BLOCK_CHAIN (decl) = prev;
3689       prev = decl;
3690     }
3691   return prev;
3692 }
3693 
3694 /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
3695    non-NULL, list them all into VECTOR, in a depth-first preorder
3696    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
3697    blocks.  */
3698 
3699 static int
3700 all_blocks (tree block, tree *vector)
3701 {
3702   int n_blocks = 0;
3703 
3704   while (block)
3705     {
3706       TREE_ASM_WRITTEN (block) = 0;
3707 
3708       /* Record this block.  */
3709       if (vector)
3710 	vector[n_blocks] = block;
3711 
3712       ++n_blocks;
3713 
3714       /* Record the subblocks, and their subblocks...  */
3715       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3716 			      vector ? vector + n_blocks : 0);
3717       block = BLOCK_CHAIN (block);
3718     }
3719 
3720   return n_blocks;
3721 }
3722 
3723 /* Return a vector containing all the blocks rooted at BLOCK.  The
3724    number of elements in the vector is stored in N_BLOCKS_P.  The
3725    vector is dynamically allocated; it is the caller's responsibility
3726    to call `free' on the pointer returned.  */
3727 
3728 static tree *
3729 get_block_vector (tree block, int *n_blocks_p)
3730 {
3731   tree *block_vector;
3732 
3733   *n_blocks_p = all_blocks (block, NULL);
3734   block_vector = XNEWVEC (tree, *n_blocks_p);
3735   all_blocks (block, block_vector);
3736 
3737   return block_vector;
3738 }
3739 
3740 static GTY(()) int next_block_index = 2;
3741 
3742 /* Set BLOCK_NUMBER for all the blocks in FN.  */
3743 
3744 void
3745 number_blocks (tree fn)
3746 {
3747   int i;
3748   int n_blocks;
3749   tree *block_vector;
3750 
3751   /* For SDB and XCOFF debugging output, we start numbering the blocks
3752      from 1 within each function, rather than keeping a running
3753      count.  */
3754 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3755   if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3756     next_block_index = 1;
3757 #endif
3758 
3759   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3760 
3761   /* The top-level BLOCK isn't numbered at all.  */
3762   for (i = 1; i < n_blocks; ++i)
3763     /* We number the blocks from two.  */
3764     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3765 
3766   free (block_vector);
3767 
3768   return;
3769 }
3770 
3771 /* If VAR is present in a subblock of BLOCK, return the subblock.  */
3772 
3773 tree
3774 debug_find_var_in_block_tree (tree var, tree block)
3775 {
3776   tree t;
3777 
3778   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3779     if (t == var)
3780       return block;
3781 
3782   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3783     {
3784       tree ret = debug_find_var_in_block_tree (var, t);
3785       if (ret)
3786 	return ret;
3787     }
3788 
3789   return NULL_TREE;
3790 }
3791 
3792 /* Allocate a function structure for FNDECL and set its contents
3793    to the defaults.  */
3794 
3795 void
3796 allocate_struct_function (tree fndecl)
3797 {
3798   tree result;
3799   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3800 
3801   cfun = ggc_alloc_cleared (sizeof (struct function));
3802 
3803   cfun->stack_alignment_needed = STACK_BOUNDARY;
3804   cfun->preferred_stack_boundary = STACK_BOUNDARY;
3805 
3806   current_function_funcdef_no = funcdef_no++;
3807 
3808   cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3809 
3810   init_eh_for_function ();
3811 
3812   lang_hooks.function.init (cfun);
3813   if (init_machine_status)
3814     cfun->machine = (*init_machine_status) ();
3815 
3816   if (fndecl == NULL)
3817     return;
3818 
3819   DECL_STRUCT_FUNCTION (fndecl) = cfun;
3820   cfun->decl = fndecl;
3821 
3822   /* APPLE LOCAL begin radar 5732232 - blocks */
3823   /* We cannot support blocks which return aggregates because at this
3824      point we do not have info on the return type. */
3825   if (!cur_block)
3826   {
3827     result = DECL_RESULT (fndecl);
3828     if (aggregate_value_p (result, fndecl))
3829     {
3830 #ifdef PCC_STATIC_STRUCT_RETURN
3831       current_function_returns_pcc_struct = 1;
3832 #endif
3833       current_function_returns_struct = 1;
3834     }
3835     /* This code is not used anywhere!  */
3836     current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3837   }
3838   /* APPLE LOCAL end radar 5732232 - blocks */
3839   current_function_stdarg
3840     = (fntype
3841        && TYPE_ARG_TYPES (fntype) != 0
3842        && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3843 	   != void_type_node));
3844 
3845   /* Assume all registers in stdarg functions need to be saved.  */
3846   cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3847   cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3848 }
3849 
3850 /* Reset cfun, and other non-struct-function variables to defaults as
3851    appropriate for emitting rtl at the start of a function.  */
3852 
3853 static void
3854 prepare_function_start (tree fndecl)
3855 {
3856   if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3857     cfun = DECL_STRUCT_FUNCTION (fndecl);
3858   else
3859     allocate_struct_function (fndecl);
3860   init_emit ();
3861   init_varasm_status (cfun);
3862   init_expr ();
3863 
3864   cse_not_expected = ! optimize;
3865 
3866   /* Caller save not needed yet.  */
3867   caller_save_needed = 0;
3868 
3869   /* We haven't done register allocation yet.  */
3870   reg_renumber = 0;
3871 
3872   /* Indicate that we have not instantiated virtual registers yet.  */
3873   virtuals_instantiated = 0;
3874 
3875   /* Indicate that we want CONCATs now.  */
3876   generating_concat_p = 1;
3877 
3878   /* Indicate we have no need of a frame pointer yet.  */
3879   frame_pointer_needed = 0;
3880 }
3881 
3882 /* Initialize the rtl expansion mechanism so that we can do simple things
3883    like generate sequences.  This is used to provide a context during global
3884    initialization of some passes.  */
3885 void
3886 init_dummy_function_start (void)
3887 {
3888   prepare_function_start (NULL);
3889 }
3890 
3891 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3892    and initialize static variables for generating RTL for the statements
3893    of the function.  */
3894 
3895 void
3896 init_function_start (tree subr)
3897 {
3898   prepare_function_start (subr);
3899 
3900   /* Prevent ever trying to delete the first instruction of a
3901      function.  Also tell final how to output a linenum before the
3902      function prologue.  Note linenums could be missing, e.g. when
3903      compiling a Java .class file.  */
3904   if (! DECL_IS_BUILTIN (subr))
3905     emit_line_note (DECL_SOURCE_LOCATION (subr));
3906 
3907   /* Make sure first insn is a note even if we don't want linenums.
3908      This makes sure the first insn will never be deleted.
3909      Also, final expects a note to appear there.  */
3910   emit_note (NOTE_INSN_DELETED);
3911 
3912   /* Warn if this value is an aggregate type,
3913      regardless of which calling convention we are using for it.  */
3914   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3915     warning (OPT_Waggregate_return, "function returns an aggregate");
3916 }
3917 
3918 /* Make sure all values used by the optimization passes have sane
3919    defaults.  */
3920 unsigned int
3921 init_function_for_compilation (void)
3922 {
3923   reg_renumber = 0;
3924 
3925   /* No prologue/epilogue insns yet.  Make sure that these vectors are
3926      empty.  */
3927   gcc_assert (VEC_length (int, prologue) == 0);
3928   gcc_assert (VEC_length (int, epilogue) == 0);
3929   gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3930   return 0;
3931 }
3932 
3933 struct tree_opt_pass pass_init_function =
3934 {
3935   NULL,                                 /* name */
3936   NULL,                                 /* gate */
3937   init_function_for_compilation,        /* execute */
3938   NULL,                                 /* sub */
3939   NULL,                                 /* next */
3940   0,                                    /* static_pass_number */
3941   0,                                    /* tv_id */
3942   0,                                    /* properties_required */
3943   0,                                    /* properties_provided */
3944   0,                                    /* properties_destroyed */
3945   0,                                    /* todo_flags_start */
3946   0,                                    /* todo_flags_finish */
3947   0                                     /* letter */
3948 };
3949 
3950 
3951 void
3952 expand_main_function (void)
3953 {
3954 #if (defined(INVOKE__main)				\
3955      || (!defined(HAS_INIT_SECTION)			\
3956 	 && !defined(INIT_SECTION_ASM_OP)		\
3957 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3958   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3959 #endif
3960 }
3961 
3962 /* Expand code to initialize the stack_protect_guard.  This is invoked at
3963    the beginning of a function to be protected.  */
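/* Conceptually, the prologue copies the target's guard value into the
   function's local guard slot, roughly `guard_slot = guard', and
   stack_protect_epilogue below re-compares the two before returning,
   branching to the target's stack_protect_fail expansion on a mismatch.  */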
3964 
3965 #ifndef HAVE_stack_protect_set
3966 # define HAVE_stack_protect_set		0
3967 # define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
3968 #endif
3969 
3970 void
3971 stack_protect_prologue (void)
3972 {
3973   tree guard_decl = targetm.stack_protect_guard ();
3974   rtx x, y;
3975 
3976   /* Avoid expand_expr here, because we don't want guard_decl pulled
3977      into registers unless absolutely necessary.  And we know that
3978      cfun->stack_protect_guard is a local stack slot, so this skips
3979      all the fluff.  */
3980   x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3981   y = validize_mem (DECL_RTL (guard_decl));
3982 
3983   /* Allow the target to copy from Y to X without leaking Y into a
3984      register.  */
3985   if (HAVE_stack_protect_set)
3986     {
3987       rtx insn = gen_stack_protect_set (x, y);
3988       if (insn)
3989 	{
3990 	  emit_insn (insn);
3991 	  return;
3992 	}
3993     }
3994 
3995   /* Otherwise do a straight move.  */
3996   emit_move_insn (x, y);
3997 }
3998 
3999 /* Expand code to verify the stack_protect_guard.  This is invoked at
4000    the end of a function to be protected.  */
4001 
4002 #ifndef HAVE_stack_protect_test
4003 # define HAVE_stack_protect_test		0
4004 # define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
4005 #endif
4006 
4007 void
4008 stack_protect_epilogue (void)
4009 {
4010   tree guard_decl = targetm.stack_protect_guard ();
4011   rtx label = gen_label_rtx ();
4012   rtx x, y, tmp;
4013 
4014   /* Avoid expand_expr here, because we don't want guard_decl pulled
4015      into registers unless absolutely necessary.  And we know that
4016      cfun->stack_protect_guard is a local stack slot, so this skips
4017      all the fluff.  */
4018   x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4019   y = validize_mem (DECL_RTL (guard_decl));
4020 
4021   /* Allow the target to compare Y with X without leaking either into
4022      a register.  */
4023   if (HAVE_stack_protect_test != 0)
4024     {
4025       tmp = gen_stack_protect_test (x, y, label);
4026       if (tmp)
4027 	{
4028 	  emit_insn (tmp);
4029 	  goto done;
4030 	}
4031     }
4032 
4033   emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4034  done:
4035 
4036   /* The noreturn predictor has been moved to the tree level.  The rtl-level
4037      predictors estimate this branch about 20%, which isn't enough to get
4038      things moved out of line.  Since this is the only extant case of adding
4039      a noreturn function at the rtl level, it doesn't seem worth doing anything
4040      except adding the prediction by hand.  */
4041   tmp = get_last_insn ();
4042   if (JUMP_P (tmp))
4043     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4044 
4045   expand_expr_stmt (targetm.stack_protect_fail ());
4046   emit_label (label);
4047 }
4048 
4049 /* Start the RTL for a new function, and set variables used for
4050    emitting RTL.
4051    SUBR is the FUNCTION_DECL node.  */
4054 
4055 void
4056 expand_function_start (tree subr)
4057 {
4058   /* Make sure volatile mem refs aren't considered
4059      valid operands of arithmetic insns.  */
4060   init_recog_no_volatile ();
4061 
4062   current_function_profile
4063     = (profile_flag
4064        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4065 
4066   current_function_limit_stack
4067     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4068 
4069   /* Make the label for return statements to jump to.  Do not special
4070      case machines with special return instructions -- they will be
4071      handled later during jump, ifcvt, or epilogue creation.  */
4072   return_label = gen_label_rtx ();
4073 
4074   /* Initialize rtx used to return the value.  */
4075   /* Do this before assign_parms so that we copy the struct value address
4076      before any library calls that assign parms might generate.  */
4077 
4078   /* Decide whether to return the value in memory or in a register.  */
4079   if (aggregate_value_p (DECL_RESULT (subr), subr))
4080     {
4081       /* Returning something that won't go in a register.  */
4082       rtx value_address = 0;
4083 
4084 #ifdef PCC_STATIC_STRUCT_RETURN
4085       if (current_function_returns_pcc_struct)
4086 	{
4087 	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4088 	  value_address = assemble_static_space (size);
4089 	}
4090       else
4091 #endif
4092 	{
4093 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4094 	  /* Expect to be passed the address of a place to store the value.
4095 	     If it is passed as an argument, assign_parms will take care of
4096 	     it.  */
4097 	  if (sv)
4098 	    {
4099 	      value_address = gen_reg_rtx (Pmode);
4100 	      emit_move_insn (value_address, sv);
4101 	    }
4102 	}
4103       if (value_address)
4104 	{
4105 	  rtx x = value_address;
4106 	  if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4107 	    {
4108 	      x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4109 	      set_mem_attributes (x, DECL_RESULT (subr), 1);
4110 	    }
4111 	  SET_DECL_RTL (DECL_RESULT (subr), x);
4112 	}
4113     }
4114   else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4115     /* If return mode is void, this decl rtl should not be used.  */
4116     SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4117   else
4118     {
4119       /* Compute the return values into a pseudo reg, which we will copy
4120 	 into the true return register after the cleanups are done.  */
4121       tree return_type = TREE_TYPE (DECL_RESULT (subr));
4122       if (TYPE_MODE (return_type) != BLKmode
4123 	  && targetm.calls.return_in_msb (return_type))
4124 	/* expand_function_end will insert the appropriate padding in
4125 	   this case.  Use the return value's natural (unpadded) mode
4126 	   within the function proper.  */
4127 	SET_DECL_RTL (DECL_RESULT (subr),
4128 		      gen_reg_rtx (TYPE_MODE (return_type)));
4129       else
4130 	{
4131 	  /* In order to figure out what mode to use for the pseudo, we
4132 	     figure out what the mode of the eventual return register will
4133 	     actually be, and use that.  */
4134 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4135 
4136 	  /* Structures that are returned in registers are not
4137 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
4138 	  if (REG_P (hard_reg))
4139 	    SET_DECL_RTL (DECL_RESULT (subr),
4140 			  gen_reg_rtx (GET_MODE (hard_reg)));
4141 	  else
4142 	    {
4143 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4144 	      SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4145 	    }
4146 	}
4147 
4148       /* Set DECL_REGISTER flag so that expand_function_end will copy the
4149 	 result to the real return register(s).  */
4150       DECL_REGISTER (DECL_RESULT (subr)) = 1;
4151     }
4152 
4153   /* Initialize rtx for parameters and local variables.
4154      In some cases this requires emitting insns.  */
4155   assign_parms (subr);
4156 
4157   /* If function gets a static chain arg, store it.  */
4158   if (cfun->static_chain_decl)
4159     {
4160       tree parm = cfun->static_chain_decl;
4161       rtx local = gen_reg_rtx (Pmode);
4162 
4163       set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4164       SET_DECL_RTL (parm, local);
4165       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4166 
4167       emit_move_insn (local, static_chain_incoming_rtx);
4168     }
4169 
4170   /* If the function receives a non-local goto, then store the
4171      bits we need to restore the frame pointer.  */
4172   if (cfun->nonlocal_goto_save_area)
4173     {
4174       tree t_save;
4175       rtx r_save;
4176 
4177       /* ??? We need to do this save early.  Unfortunately here is
4178 	 before the frame variable gets declared.  Help out...  */
4179       expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4180 
4181       t_save = build4 (ARRAY_REF, ptr_type_node,
4182 		       cfun->nonlocal_goto_save_area,
4183 		       integer_zero_node, NULL_TREE, NULL_TREE);
4184       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4185       r_save = convert_memory_address (Pmode, r_save);
4186 
4187       emit_move_insn (r_save, virtual_stack_vars_rtx);
4188       update_nonlocal_goto_save_area ();
4189     }
4190 
4191   /* The following was moved from init_function_start.
4192      The move is supposed to make sdb output more accurate.  */
4193   /* Indicate the beginning of the function body,
4194      as opposed to parm setup.  */
4195   emit_note (NOTE_INSN_FUNCTION_BEG);
4196 
4197   gcc_assert (NOTE_P (get_last_insn ()));
4198 
4199   parm_birth_insn = get_last_insn ();
4200 
4201   if (current_function_profile)
4202     {
4203 #ifdef PROFILE_HOOK
4204       PROFILE_HOOK (current_function_funcdef_no);
4205 #endif
4206     }
4207 
4208   /* After the display initializations is where the stack checking
4209      probe should go.  */
4210   if (flag_stack_check)
4211     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4212 
4213   /* Make sure there is a line number after the function entry setup code.  */
4214   force_next_line_note ();
4215 }
4216 
4217 /* Undo the effects of init_dummy_function_start.  */
4218 void
4219 expand_dummy_function_end (void)
4220 {
4221   /* End any sequences that failed to be closed due to syntax errors.  */
4222   while (in_sequence_p ())
4223     end_sequence ();
4224 
4225   /* Outside function body, can't compute type's actual size
4226      until next function's body starts.  */
4227 
4228   free_after_parsing (cfun);
4229   free_after_compilation (cfun);
4230   cfun = 0;
4231 }
4232 
4233 /* Call DOIT for each hard register used as a return value from
4234    the current function.  */
4235 
4236 void
4237 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4238 {
4239   rtx outgoing = current_function_return_rtx;
4240 
4241   if (! outgoing)
4242     return;
4243 
4244   if (REG_P (outgoing))
4245     (*doit) (outgoing, arg);
4246   else if (GET_CODE (outgoing) == PARALLEL)
4247     {
4248       int i;
4249 
4250       for (i = 0; i < XVECLEN (outgoing, 0); i++)
4251 	{
4252 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4253 
4254 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4255 	    (*doit) (x, arg);
4256 	}
4257     }
4258 }
4259 
4260 static void
4261 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4262 {
4263   emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4264 }
4265 
4266 void
4267 clobber_return_register (void)
4268 {
4269   diddle_return_value (do_clobber_return_reg, NULL);
4270 
4271   /* In case we do use a pseudo to return the value, clobber it too.  */
4272   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4273     {
4274       tree decl_result = DECL_RESULT (current_function_decl);
4275       rtx decl_rtl = DECL_RTL (decl_result);
4276       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4277 	{
4278 	  do_clobber_return_reg (decl_rtl, NULL);
4279 	}
4280     }
4281 }
4282 
4283 static void
4284 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4285 {
4286   emit_insn (gen_rtx_USE (VOIDmode, reg));
4287 }
4288 
4289 static void
4290 use_return_register (void)
4291 {
4292   diddle_return_value (do_use_return_reg, NULL);
4293 }
4294 
4295 /* Possibly warn about unused parameters.  */
4296 void
4297 do_warn_unused_parameter (tree fn)
4298 {
4299   tree decl;
4300 
4301   for (decl = DECL_ARGUMENTS (fn);
4302        decl; decl = TREE_CHAIN (decl))
4303     if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4304 	&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4305       warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4306 }
4307 
4308 static GTY(()) rtx initial_trampoline;
4309 
4310 /* Generate RTL for the end of the current function.  */
4311 
4312 void
4313 expand_function_end (void)
4314 {
4315   rtx clobber_after;
4316 
4317   /* If arg_pointer_save_area was referenced only from a nested
4318      function, we will not have initialized it yet.  Do that now.  */
4319   if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4320     get_arg_pointer_save_area (cfun);
4321 
4322   /* If we are doing stack checking and this function makes calls,
4323      do a stack probe at the start of the function to ensure we have enough
4324      space for another stack frame.  */
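  /* In other words, if any call instruction is present, code that probes
     STACK_CHECK_MAX_FRAME_SIZE bytes of stack beyond the
     STACK_CHECK_PROTECT cushion is emitted at the position recorded
     earlier in stack_check_probe_note.  */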
4325   if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4326     {
4327       rtx insn, seq;
4328 
4329       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4330 	if (CALL_P (insn))
4331 	  {
4332 	    start_sequence ();
4333 	    probe_stack_range (STACK_CHECK_PROTECT,
4334 			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4335 	    seq = get_insns ();
4336 	    end_sequence ();
4337 	    emit_insn_before (seq, stack_check_probe_note);
4338 	    break;
4339 	  }
4340     }
4341 
4342   /* Possibly warn about unused parameters.
4343      When the frontend does unit-at-a-time, the warning is already
4344      issued at finalization time.  */
4345   if (warn_unused_parameter
4346       && !lang_hooks.callgraph.expand_function)
4347     do_warn_unused_parameter (current_function_decl);
4348 
4349   /* End any sequences that failed to be closed due to syntax errors.  */
4350   while (in_sequence_p ())
4351     end_sequence ();
4352 
4353   clear_pending_stack_adjust ();
4354   do_pending_stack_adjust ();
4355 
4356   /* Mark the end of the function body.
4357      If control reaches this insn, the function can drop through
4358      without returning a value.  */
4359   emit_note (NOTE_INSN_FUNCTION_END);
4360 
4361   /* Must mark the last line number note in the function, so that the test
4362      coverage code can avoid counting the last line twice.  This just tells
4363      the code to ignore the immediately following line note, since there
4364      already exists a copy of this note somewhere above.  This line number
4365      note is still needed for debugging though, so we can't delete it.  */
4366   if (flag_test_coverage)
4367     emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4368 
4369   /* Output a linenumber for the end of the function.
4370      SDB depends on this.  */
4371   force_next_line_note ();
4372   emit_line_note (input_location);
4373 
4374   /* Before the return label (if any), clobber the return
4375      registers so that they are not propagated live to the rest of
4376      the function.  This can only happen with functions that drop
4377      through; if there had been a return statement, there would
4378      have either been a return rtx, or a jump to the return label.
4379 
4380      We delay actual code generation until after the current_function_value_rtx
4381      is computed.  */
4382   clobber_after = get_last_insn ();
4383 
4384   /* Output the label for the actual return from the function.  */
4385   emit_label (return_label);
4386 
4387 #ifdef TARGET_PROFILER_EPILOGUE
4388   if (current_function_profile && TARGET_PROFILER_EPILOGUE)
4389     {
4390       static rtx mexitcount_libfunc;
4391       static int initialized;
4392 
4393       if (!initialized)
4394 	{
4395 	  mexitcount_libfunc = init_one_libfunc (".mexitcount");
4396 	  initialized = 1;
4397 	}
4398       emit_library_call (mexitcount_libfunc, LCT_NORMAL, VOIDmode, 0);
4399     }
4400 #endif
4401 
4402   if (USING_SJLJ_EXCEPTIONS)
4403     {
4404       /* Let except.c know where it should emit the call to unregister
4405 	 the function context for sjlj exceptions.  */
4406       if (flag_exceptions)
4407 	sjlj_emit_function_exit_after (get_last_insn ());
4408     }
4409   else
4410     {
4411       /* @@@ This is a kludge.  We want to ensure that instructions that
4412 	 may trap are not moved into the epilogue by scheduling, because
4413 	 we don't always emit unwind information for the epilogue.
4414 	 However, not all machine descriptions define a blockage insn, so
4415 	 emit an ASM_INPUT to act as one.  */
4416       if (flag_non_call_exceptions)
4417 	emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4418     }
4419 
4420   /* If this is an implementation of throw, do what's necessary to
4421      communicate between __builtin_eh_return and the epilogue.  */
4422   expand_eh_return ();
4423 
4424   /* If scalar return value was computed in a pseudo-reg, or was a named
4425      return value that got dumped to the stack, copy that to the hard
4426      return register.  */
4427   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4428     {
4429       tree decl_result = DECL_RESULT (current_function_decl);
4430       rtx decl_rtl = DECL_RTL (decl_result);
4431 
4432       if (REG_P (decl_rtl)
4433 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4434 	  : DECL_REGISTER (decl_result))
4435 	{
4436 	  rtx real_decl_rtl = current_function_return_rtx;
4437 
4438 	  /* This should be set in assign_parms.  */
4439 	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4440 
4441 	  /* If this is a BLKmode structure being returned in registers,
4442 	     then use the mode computed in expand_return.  Note that if
4443 	     decl_rtl is memory, then its mode may have been changed,
4444 	     but that current_function_return_rtx has not.  */
4445 	  if (GET_MODE (real_decl_rtl) == BLKmode)
4446 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4447 
4448 	  /* If a non-BLKmode return value should be padded at the least
4449 	     significant end of the register, shift it left by the appropriate
4450 	     amount.  BLKmode results are handled using the group load/store
4451 	     machinery.  */
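	  /* For instance, on a big-endian target that returns an HImode
	     value in the most significant half of an SImode register, the
	     value is shifted left by 16 bits here (sizes purely
	     illustrative).  */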
4452 	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4453 	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4454 	    {
4455 	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4456 					   REGNO (real_decl_rtl)),
4457 			      decl_rtl);
4458 	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4459 	    }
4460 	  /* If a named return value dumped decl_result to memory, then
4461 	     we may need to re-do the PROMOTE_MODE signed/unsigned
4462 	     extension.  */
4463 	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4464 	    {
4465 	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4466 
4467 	      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4468 		promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4469 			      &unsignedp, 1);
4470 
4471 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
4472 	    }
4473 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
4474 	    {
4475 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
4476 		 move the result to the real return registers.  Otherwise, do
4477 		 a group load from decl_rtl for a named return.  */
4478 	      if (GET_CODE (decl_rtl) == PARALLEL)
4479 		emit_group_move (real_decl_rtl, decl_rtl);
4480 	      else
4481 		emit_group_load (real_decl_rtl, decl_rtl,
4482 				 TREE_TYPE (decl_result),
4483 				 int_size_in_bytes (TREE_TYPE (decl_result)));
4484 	    }
4485 	  /* In the case of complex integer modes smaller than a word, we'll
4486 	     need to generate some non-trivial bitfield insertions.  Do that
4487 	     on a pseudo and not the hard register.  */
4488 	  else if (GET_CODE (decl_rtl) == CONCAT
4489 		   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4490 		   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4491 	    {
4492 	      int old_generating_concat_p;
4493 	      rtx tmp;
4494 
4495 	      old_generating_concat_p = generating_concat_p;
4496 	      generating_concat_p = 0;
4497 	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4498 	      generating_concat_p = old_generating_concat_p;
4499 
4500 	      emit_move_insn (tmp, decl_rtl);
4501 	      emit_move_insn (real_decl_rtl, tmp);
4502 	    }
4503 	  else
4504 	    emit_move_insn (real_decl_rtl, decl_rtl);
4505 	}
4506     }
4507 
4508   /* If returning a structure, arrange to return the address of the value
4509      in a place where debuggers expect to find it.
4510 
4511      If returning a structure PCC style,
4512      the caller also depends on this value.
4513      And current_function_returns_pcc_struct is not necessarily set.  */
4514   if (current_function_returns_struct
4515       || current_function_returns_pcc_struct)
4516     {
4517       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4518       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4519       rtx outgoing;
4520 
4521       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4522 	type = TREE_TYPE (type);
4523       else
4524 	value_address = XEXP (value_address, 0);
4525 
4526       outgoing = targetm.calls.function_value (build_pointer_type (type),
4527 					       current_function_decl, true);
4528 
4529       /* Mark this as a function return value so integrate will delete the
4530 	 assignment and USE below when inlining this function.  */
4531       REG_FUNCTION_VALUE_P (outgoing) = 1;
4532 
4533       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
4534       value_address = convert_memory_address (GET_MODE (outgoing),
4535 					      value_address);
4536 
4537       emit_move_insn (outgoing, value_address);
4538 
4539       /* Show return register used to hold result (in this case the address
4540 	 of the result).  */
4541       current_function_return_rtx = outgoing;
4542     }
4543 
4544   /* Emit the actual code to clobber return register.  */
4545   {
4546     rtx seq;
4547 
4548     start_sequence ();
4549     clobber_return_register ();
4550     expand_naked_return ();
4551     seq = get_insns ();
4552     end_sequence ();
4553 
4554     emit_insn_after (seq, clobber_after);
4555   }
4556 
4557   /* Output the label for the naked return from the function.  */
4558   emit_label (naked_return_label);
4559 
4560   /* If stack protection is enabled for this function, check the guard.  */
4561   if (cfun->stack_protect_guard)
4562     stack_protect_epilogue ();
4563 
4564   /* If we had calls to alloca, and this machine needs
4565      an accurate stack pointer to exit the function,
4566      insert some code to save and restore the stack pointer.  */
4567   if (! EXIT_IGNORE_STACK
4568       && current_function_calls_alloca)
4569     {
4570       rtx tem = 0;
4571 
4572       emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4573       emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4574     }
4575 
4576   /* ??? This should no longer be necessary since stupid is no longer with
4577      us, but there are some parts of the compiler (e.g. reload_combine, and
4578      sh mach_dep_reorg) that still try to compute their own lifetime info
4579      instead of using the general framework.  */
4580   use_return_register ();
4581 }
4582 
4583 rtx
4584 get_arg_pointer_save_area (struct function *f)
4585 {
4586   rtx ret = f->x_arg_pointer_save_area;
4587 
4588   if (! ret)
4589     {
4590       ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4591       f->x_arg_pointer_save_area = ret;
4592     }
4593 
4594   if (f == cfun && ! f->arg_pointer_save_area_init)
4595     {
4596       rtx seq;
4597 
4598       /* Save the arg pointer at the beginning of the function.  The
4599 	 generated stack slot may not be a valid memory address, so we
4600 	 have to check it and fix it if necessary.  */
4601       start_sequence ();
4602       emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4603       seq = get_insns ();
4604       end_sequence ();
4605 
4606       push_topmost_sequence ();
4607       emit_insn_after (seq, entry_of_function ());
4608       pop_topmost_sequence ();
4609     }
4610 
4611   return ret;
4612 }
4613 
4614 /* Extend a vector that records the INSN_UIDs of INSNS
4615    (a list of one or more insns).  */
4616 
4617 static void
4618 record_insns (rtx insns, VEC(int,heap) **vecp)
4619 {
4620   rtx tmp;
4621 
4622   for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4623     VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4624 }
4625 
4626 /* Set the locator of the insn chain starting at INSN to LOC.  */
4627 static void
4628 set_insn_locators (rtx insn, int loc)
4629 {
4630   while (insn != NULL_RTX)
4631     {
4632       if (INSN_P (insn))
4633 	INSN_LOCATOR (insn) = loc;
4634       insn = NEXT_INSN (insn);
4635     }
4636 }
4637 
4638 /* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
4639    be running after reorg, SEQUENCE rtl is possible.  */
4640 
4641 static int
4642 contains (rtx insn, VEC(int,heap) **vec)
4643 {
4644   int i, j;
4645 
4646   if (NONJUMP_INSN_P (insn)
4647       && GET_CODE (PATTERN (insn)) == SEQUENCE)
4648     {
4649       int count = 0;
4650       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4651 	for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4652 	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4653 	      == VEC_index (int, *vec, j))
4654 	    count++;
4655       return count;
4656     }
4657   else
4658     {
4659       for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4660 	if (INSN_UID (insn) == VEC_index (int, *vec, j))
4661 	  return 1;
4662     }
4663   return 0;
4664 }
4665 
4666 int
4667 prologue_epilogue_contains (rtx insn)
4668 {
4669   if (contains (insn, &prologue))
4670     return 1;
4671   if (contains (insn, &epilogue))
4672     return 1;
4673   return 0;
4674 }
4675 
4676 int
4677 sibcall_epilogue_contains (rtx insn)
4678 {
4679   if (sibcall_epilogue)
4680     return contains (insn, &sibcall_epilogue);
4681   return 0;
4682 }
4683 
4684 #ifdef HAVE_return
4685 /* Insert gen_return at the end of block BB.  This also means updating
4686    block_for_insn appropriately.  */
4687 
4688 static void
4689 emit_return_into_block (basic_block bb, rtx line_note)
4690 {
4691   emit_jump_insn_after (gen_return (), BB_END (bb));
4692   if (line_note)
4693     emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4694 }
4695 #endif /* HAVE_return */
4696 
4697 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4698 
4699 /* These functions convert the epilogue into a variant that does not
4700    modify the stack pointer.  This is used in cases where a function
4701    returns an object whose size is not known until it is computed.
4702    The called function leaves the object on the stack, leaves the
4703    stack depressed, and returns a pointer to the object.
4704 
4705    What we need to do is track all modifications and references to the
4706    stack pointer, deleting the modifications and changing the
4707    references to point to the location the stack pointer would have
4708    pointed to had the modifications taken place.
4709 
4710    These functions need to be portable so we need to make as few
4711    assumptions about the epilogue as we can.  However, the epilogue
4712    basically contains three things: instructions to reset the stack
4713    pointer, instructions to reload registers, possibly including the
4714    frame pointer, and an instruction to return to the caller.
4715 
4716    We must be sure of what a relevant epilogue insn is doing.  We also
4717    make no attempt to validate the insns we make since if they are
4718    invalid, we probably can't do anything valid.  The intent is that
4719    these routines get "smarter" as more and more machines start to use
4720    them and they try operating on different epilogues.
4721 
4722    We use the following structure to track what the part of the
4723    epilogue that we've already processed has done.  We keep two copies
4724    of the SP equivalence, one for use during the insn we are
4725    processing and one for use in the next insn.  The difference is
4726    because one part of a PARALLEL may adjust SP and the other may use
4727    it.  */
4728 
4729 struct epi_info
4730 {
4731   rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
4732   HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
4733   rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
4734   HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
4735   rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
4736 				   should be set to once we no longer need
4737 				   its value.  */
4738   rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4739 					     for registers.  */
4740 };
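/* As an illustrative sketch (not taken from any particular target): for a
   two-insn epilogue of the form

	(set (reg sp) (reg fp))
	(return)

   on a target whose INCOMING_RETURN_ADDR_RTX is (mem (reg sp)), the code
   below drops the assignment to SP, records FP as SP's equivalent with a
   zero offset, and replaces the RETURN with an indirect jump through
   (mem (reg fp)), so the stack pointer is never modified.  */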
4741 
4742 static void handle_epilogue_set (rtx, struct epi_info *);
4743 static void update_epilogue_consts (rtx, rtx, void *);
4744 static void emit_equiv_load (struct epi_info *);
4745 
4746 /* Modify INSN, a list of one or more insns that is part of the epilogue, so that
4747    it makes no modifications to the stack pointer.  Return the new list of insns.  */
4748 
4749 static rtx
4750 keep_stack_depressed (rtx insns)
4751 {
4752   int j;
4753   struct epi_info info;
4754   rtx insn, next;
4755 
4756   /* If the epilogue is just a single instruction, it must be OK as is.  */
4757   if (NEXT_INSN (insns) == NULL_RTX)
4758     return insns;
4759 
4760   /* Otherwise, start a sequence, initialize the information we have, and
4761      process all the insns we were given.  */
4762   start_sequence ();
4763 
4764   info.sp_equiv_reg = stack_pointer_rtx;
4765   info.sp_offset = 0;
4766   info.equiv_reg_src = 0;
4767 
4768   for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4769     info.const_equiv[j] = 0;
4770 
4771   insn = insns;
4772   next = NULL_RTX;
4773   while (insn != NULL_RTX)
4774     {
4775       next = NEXT_INSN (insn);
4776 
4777       if (!INSN_P (insn))
4778 	{
4779 	  add_insn (insn);
4780 	  insn = next;
4781 	  continue;
4782 	}
4783 
4784       /* If this insn references the register that SP is equivalent to and
4785 	 we have a pending load to that register, we must force out the load
4786 	 first and then indicate we no longer know what SP's equivalent is.  */
4787       if (info.equiv_reg_src != 0
4788 	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4789 	{
4790 	  emit_equiv_load (&info);
4791 	  info.sp_equiv_reg = 0;
4792 	}
4793 
4794       info.new_sp_equiv_reg = info.sp_equiv_reg;
4795       info.new_sp_offset = info.sp_offset;
4796 
4797       /* If this is a (RETURN) and the return address is on the stack,
4798 	 update the address and change to an indirect jump.  */
4799       if (GET_CODE (PATTERN (insn)) == RETURN
4800 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
4801 	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4802 	{
4803 	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4804 	  rtx base = 0;
4805 	  HOST_WIDE_INT offset = 0;
4806 	  rtx jump_insn, jump_set;
4807 
4808 	  /* If the return address is in a register, we can emit the insn
4809 	     unchanged.  Otherwise, it must be a MEM and we see what the
4810 	     base register and offset are.  In any case, we have to emit any
4811 	     pending load to the equivalent reg of SP, if any.  */
4812 	  if (REG_P (retaddr))
4813 	    {
4814 	      emit_equiv_load (&info);
4815 	      add_insn (insn);
4816 	      insn = next;
4817 	      continue;
4818 	    }
4819 	  else
4820 	    {
4821 	      rtx ret_ptr;
4822 	      gcc_assert (MEM_P (retaddr));
4823 
4824 	      ret_ptr = XEXP (retaddr, 0);
4825 
4826 	      if (REG_P (ret_ptr))
4827 		{
4828 		  base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4829 		  offset = 0;
4830 		}
4831 	      else
4832 		{
4833 		  gcc_assert (GET_CODE (ret_ptr) == PLUS
4834 			      && REG_P (XEXP (ret_ptr, 0))
4835 			      && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4836 		  base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4837 		  offset = INTVAL (XEXP (ret_ptr, 1));
4838 		}
4839 	    }
4840 
4841 	  /* If the base of the location containing the return pointer
4842 	     is SP, we must update it with the replacement address.  Otherwise,
4843 	     just build the necessary MEM.  */
4844 	  retaddr = plus_constant (base, offset);
4845 	  if (base == stack_pointer_rtx)
4846 	    retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4847 					    plus_constant (info.sp_equiv_reg,
4848 							   info.sp_offset));
4849 
4850 	  retaddr = gen_rtx_MEM (Pmode, retaddr);
4851 	  MEM_NOTRAP_P (retaddr) = 1;
4852 
4853 	  /* If there is a pending load to the equivalent register for SP
4854 	     and we reference that register, we must load our address into
4855 	     a scratch register and then do that load.  */
4856 	  if (info.equiv_reg_src
4857 	      && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4858 	    {
4859 	      unsigned int regno;
4860 	      rtx reg;
4861 
4862 	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4863 		if (HARD_REGNO_MODE_OK (regno, Pmode)
4864 		    && !fixed_regs[regno]
4865 		    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4866 		    && !REGNO_REG_SET_P
4867 		         (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4868 		    && !refers_to_regno_p (regno,
4869 					   regno + hard_regno_nregs[regno]
4870 								   [Pmode],
4871 					   info.equiv_reg_src, NULL)
4872 		    && info.const_equiv[regno] == 0)
4873 		  break;
4874 
4875 	      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4876 
4877 	      reg = gen_rtx_REG (Pmode, regno);
4878 	      emit_move_insn (reg, retaddr);
4879 	      retaddr = reg;
4880 	    }
4881 
4882 	  emit_equiv_load (&info);
4883 	  jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4884 
4885 	  /* Show that the SET in the above insn is a RETURN.  */
4886 	  jump_set = single_set (jump_insn);
4887 	  gcc_assert (jump_set);
4888 	  SET_IS_RETURN_P (jump_set) = 1;
4889 	}
4890 
4891       /* If SP is not mentioned in the pattern and its equivalent register, if
4892 	 any, is not modified, just emit it.  Otherwise, if neither is set,
4893 	 replace the reference to SP and emit the insn.  If none of those are
4894 	 true, handle each SET individually.  */
4895       else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4896 	       && (info.sp_equiv_reg == stack_pointer_rtx
4897 		   || !reg_set_p (info.sp_equiv_reg, insn)))
4898 	add_insn (insn);
4899       else if (! reg_set_p (stack_pointer_rtx, insn)
4900 	       && (info.sp_equiv_reg == stack_pointer_rtx
4901 		   || !reg_set_p (info.sp_equiv_reg, insn)))
4902 	{
4903 	  int changed;
4904 
4905 	  changed = validate_replace_rtx (stack_pointer_rtx,
4906 					  plus_constant (info.sp_equiv_reg,
4907 							 info.sp_offset),
4908 					  insn);
4909 	  gcc_assert (changed);
4910 
4911 	  add_insn (insn);
4912 	}
4913       else if (GET_CODE (PATTERN (insn)) == SET)
4914 	handle_epilogue_set (PATTERN (insn), &info);
4915       else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4916 	{
4917 	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4918 	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4919 	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4920 	}
4921       else
4922 	add_insn (insn);
4923 
4924       info.sp_equiv_reg = info.new_sp_equiv_reg;
4925       info.sp_offset = info.new_sp_offset;
4926 
4927       /* Now update any constants this insn sets.  */
4928       note_stores (PATTERN (insn), update_epilogue_consts, &info);
4929       insn = next;
4930     }
4931 
4932   insns = get_insns ();
4933   end_sequence ();
4934   return insns;
4935 }
4936 
4937 /* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
4938    structure that contains information about what we've seen so far.  We
4939    process this SET by either updating that data or by emitting one or
4940    more insns.  */
4941 
4942 static void
4943 handle_epilogue_set (rtx set, struct epi_info *p)
4944 {
4945   /* First handle the case where we are setting SP.  Record what it is being
4946      set from, which we must be able to determine.  */
4947   if (reg_set_p (stack_pointer_rtx, set))
4948     {
4949       gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4950 
4951       if (GET_CODE (SET_SRC (set)) == PLUS)
4952 	{
4953 	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4954 	  if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4955 	    p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4956 	  else
4957 	    {
4958 	      gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4959 			  && (REGNO (XEXP (SET_SRC (set), 1))
4960 			      < FIRST_PSEUDO_REGISTER)
4961 			  && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4962 	      p->new_sp_offset
4963 		= INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4964 	    }
4965 	}
4966       else
4967 	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4968 
4969       /* If we are adjusting SP, we adjust from the old data.  */
4970       if (p->new_sp_equiv_reg == stack_pointer_rtx)
4971 	{
4972 	  p->new_sp_equiv_reg = p->sp_equiv_reg;
4973 	  p->new_sp_offset += p->sp_offset;
4974 	}
4975 
4976       gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4977 
4978       return;
4979     }
4980 
4981   /* Next handle the case where we are setting SP's equivalent
4982      register.  We must not already have a value to set it to.  We
4983      could update, but there seems little point in handling that case.
4984      Note that we have to allow for the case where we are setting the
4985      register set in the previous part of a PARALLEL inside a single
4986      insn.  But use the old offset for any updates within this insn.
4987      We must allow for the case where the register is being set in a
4988 	     different (usually wider) mode than Pmode.  */
4989   else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4990     {
4991       gcc_assert (!p->equiv_reg_src
4992 		  && REG_P (p->new_sp_equiv_reg)
4993 		  && REG_P (SET_DEST (set))
4994 		  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4995 		      <= BITS_PER_WORD)
4996 		  && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4997       p->equiv_reg_src
4998 	= simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4999 				plus_constant (p->sp_equiv_reg,
5000 					       p->sp_offset));
5001     }
5002 
5003   /* Otherwise, replace any references to SP in the insn with its new value
5004      and emit the insn.  */
5005   else
5006     {
5007       SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5008 					    plus_constant (p->sp_equiv_reg,
5009 							   p->sp_offset));
5010       SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5011 					     plus_constant (p->sp_equiv_reg,
5012 							    p->sp_offset));
5013       emit_insn (set);
5014     }
5015 }
5016 
5017 /* Update the tracking information for registers set to constants.  */
5018 
5019 static void
5020 update_epilogue_consts (rtx dest, rtx x, void *data)
5021 {
5022   struct epi_info *p = (struct epi_info *) data;
5023   rtx new;
5024 
5025   if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5026     return;
5027 
5028   /* If we are either clobbering a register or doing a partial set,
5029      show we don't know the value.  */
5030   else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5031     p->const_equiv[REGNO (dest)] = 0;
5032 
5033   /* If we are setting it to a constant, record that constant.  */
5034   else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5035     p->const_equiv[REGNO (dest)] = SET_SRC (x);
5036 
5037   /* If this is a binary operation between a register we have been tracking
5038      and a constant, see if we can compute a new constant value.  */
5039   else if (ARITHMETIC_P (SET_SRC (x))
5040 	   && REG_P (XEXP (SET_SRC (x), 0))
5041 	   && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5042 	   && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5043 	   && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5044 	   && 0 != (new = simplify_binary_operation
5045 		    (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5046 		     p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5047 		     XEXP (SET_SRC (x), 1)))
5048 	   && GET_CODE (new) == CONST_INT)
5049     p->const_equiv[REGNO (dest)] = new;
5050 
5051   /* Otherwise, we can't do anything with this value.  */
5052   else
5053     p->const_equiv[REGNO (dest)] = 0;
5054 }
5055 
5056 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */
5057 
5058 static void
5059 emit_equiv_load (struct epi_info *p)
5060 {
5061   if (p->equiv_reg_src != 0)
5062     {
5063       rtx dest = p->sp_equiv_reg;
5064 
5065       if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5066 	dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5067 			    REGNO (p->sp_equiv_reg));
5068 
5069       emit_move_insn (dest, p->equiv_reg_src);
5070       p->equiv_reg_src = 0;
5071     }
5072 }
5073 #endif
5074 
5075 /* APPLE LOCAL begin radar 6163705, Blocks prologues  */
5076 
5077 /* The function should only be called for Blocks functions.
5078 
5079    On being called, the main instruction list for the Blocks function
5080    may contain instructions for setting up the ref_decl and byref_decl
5081    variables in the Block.  Those insns really need to go before the
5082    function prologue note rather than after.  If such instructions are
5083    present, they are identifiable by their source line number, which
5084    will be one line preceding the declaration of the function.  If
5085    they are present, there will also be a source line note instruction
5086    for that line.
5087 
5088    This function does a set of things:
5089    - It finds the first such prologue insn.
5090    - It finds the last such prologue insn.
5091    - It changes the insn locator of all such prologue insns to
5092      the prologue locator.
5093    - It finds the source line note for the bogus location and
5094      removes it.
5095    - It decides if it is safe to place the prologue end note
5096      after the last prologue insn it finds, and if so, returns
5097      the last prologue insn (otherwise it returns NULL).
5098 
5099    This function makes the following checks to determine if it is
5100    safe to move the prologue end note to just below the last
5101    prologue insn it finds.  If ALL of the checks succeed then it
5102    is safe.  If any check fails, this function returns NULL.  The
5103    checks it makes are:
5104 
5105 	- There were no INSN_P instructions that occurred before the
5106 	  first prologue insn.
5107 	- If there are any non-prologue insns between the first & last
5108 	  prologue insn, the non-prologue insns do not outnumber the
5109 	 prologue insns.
5110 	- The first prologue insn & the last prologue insn are in the
5111 	  same basic block.
5112 */
5113 
5114 static rtx
5115 find_block_prologue_insns (void)
5116 {
5117   rtx first_prologue_insn = NULL;
5118   rtx last_prologue_insn = NULL;
5119   rtx line_number_note = NULL;
5120   rtx tmp_insn;
5121   int num_prologue_insns = 0;
5122   int total_insns = 0;
5123   int prologue_line = DECL_SOURCE_LINE (cfun->decl) - 1;
5124   bool other_insns_before_prologue = false;
5125   bool start_of_fnbody_found = false;
5126 
5127   /* Go through all the insns and find the first prologue insn, the
5128      last prologue insn, the source line location note, and whether or
5129      not there are any "real" insns that occur before the first
5130      prologue insn.  Re-set the insn locator for prologue insns to the
5131      prologue locator.  */
5132 
5133   for (tmp_insn = get_insns(); tmp_insn; tmp_insn = NEXT_INSN (tmp_insn))
5134     {
5135       if (INSN_P (tmp_insn))
5136 	{
5137 	  if (insn_line (tmp_insn) == prologue_line)
5138 	    {
5139 	      if (!first_prologue_insn)
5140 		first_prologue_insn = tmp_insn;
5141 	      num_prologue_insns++;
5142 	      last_prologue_insn = tmp_insn;
5143 	      INSN_LOCATOR (tmp_insn) = prologue_locator;
5144 	    }
5145 	  else if (!first_prologue_insn
5146 		   && start_of_fnbody_found)
5147 	    other_insns_before_prologue = true;
5148 	}
5149       else if (NOTE_P (tmp_insn)
5150 	       && NOTE_LINE_NUMBER (tmp_insn) == NOTE_INSN_FUNCTION_BEG)
5151 	start_of_fnbody_found = true;
5152       else if (NOTE_P (tmp_insn)
5153 	       && (XINT (tmp_insn, 5) == prologue_line))
5154 	line_number_note = tmp_insn;
5155     }
5156 
5157   /* If there were no prologue insns, return now.  */
5158 
5159   if (!first_prologue_insn)
5160     return NULL;
5161 
5162   /* If the source location note for the line before the beginning of the
5163      function was found, remove it.  */
5164 
5165   if (line_number_note)
5166     remove_insn (line_number_note);
5167 
5168   /* If other real insns got moved above the prologue insns, we can't
5169      pull out the prologue insns, so return now.  */
5170 
5171   if (other_insns_before_prologue && (optimize > 0))
5172     return NULL;
5173 
5174   /* Count the number of insns between the first prologue insn and the
5175      last prologue insn; also count the number of non-prologue insns
5176      between the first prologue insn and the last prologue insn.  */
5177 
5178   tmp_insn = first_prologue_insn;
5179   while (tmp_insn != last_prologue_insn)
5180     {
5181       total_insns++;
5182       tmp_insn = NEXT_INSN (tmp_insn);
5183     }
5184   total_insns++;
5185 
5186   /* If more than half of the insns between the first & last prologue
5187      insns are not prologue insns, then there is too much code that
5188      got moved in between prologue insns (by optimizations), so we
5189      will not try to pull it out.  */
5190 
5191   if ((num_prologue_insns * 2) <= total_insns)
5192     return NULL;
5193 
5194   /* Make sure all the prologue insns are within one basic block.
5195      If the insns cross a basic block boundary, then there is a chance
5196      that moving them will cause incorrect code, so don't do it.  */
5197 
5198   gcc_assert (first_prologue_insn != NULL);
5199   gcc_assert (last_prologue_insn != NULL);
5200 
5201   if (BLOCK_FOR_INSN (first_prologue_insn) !=
5202       BLOCK_FOR_INSN (last_prologue_insn))
5203     return NULL;
5204 
5205   return last_prologue_insn;
5206 }
5207 /* APPLE LOCAL end radar 6163705, Blocks prologues  */
5208 
5209 /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5210    this into place with notes indicating where the prologue ends and where
5211    the epilogue begins.  Update the basic block information when possible.  */
5212 
5213 void
5214 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5215 {
5216   int inserted = 0;
5217   edge e;
5218 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5219   rtx seq;
5220 #endif
5221 #ifdef HAVE_prologue
5222   rtx prologue_end = NULL_RTX;
5223 #endif
5224 #if defined (HAVE_epilogue) || defined(HAVE_return)
5225   rtx epilogue_end = NULL_RTX;
5226 #endif
5227   edge_iterator ei;
5228 
5229 #ifdef HAVE_prologue
5230   if (HAVE_prologue)
5231     {
5232       /* APPLE LOCAL begin radar 6163705, Blocks prologues  */
5233       rtx last_prologue_insn = NULL;
5234 
5235       if (BLOCK_SYNTHESIZED_FUNC (cfun->decl))
5236 	last_prologue_insn = find_block_prologue_insns();
5237       /* APPLE LOCAL end radar 6163705, Blocks prologues  */
5238 
5239       start_sequence ();
5240       seq = gen_prologue ();
5241       emit_insn (seq);
5242 
5243       /* Retain a map of the prologue insns.  */
5244       record_insns (seq, &prologue);
5245       /* APPLE LOCAL begin radar 6163705, Blocks prologues  */
5246       if (!last_prologue_insn)
5247 	prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5248       /* APPLE LOCAL end radar 6163705, Blocks prologues  */
5249 
5250 #ifndef PROFILE_BEFORE_PROLOGUE
5251       /* Ensure that instructions are not moved into the prologue when
5252 	 profiling is on.  The call to the profiling routine can be
5253 	 emitted within the live range of a call-clobbered register.  */
5254       if (current_function_profile)
5255 	emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
5256 #endif
5257 
5258       seq = get_insns ();
5259       end_sequence ();
5260       set_insn_locators (seq, prologue_locator);
5261 
5262       /* Can't deal with multiple successors of the entry block
5263          at the moment.  Function should always have at least one
5264          entry point.  */
5265       gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5266 
5267       insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5268       inserted = 1;
5269 
5270       /* APPLE LOCAL begin radar 6163705, Blocks prologues  */
5271       if (last_prologue_insn)
5272 	emit_note_after (NOTE_INSN_PROLOGUE_END, last_prologue_insn);
5273 	/* APPLE LOCAL end radar 6163705, Blocks prologues  */
    }
5274 #endif
5275 
5276   /* If the exit block has no non-fake predecessors, we don't need
5277      an epilogue.  */
5278   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5279     if ((e->flags & EDGE_FAKE) == 0)
5280       break;
5281   if (e == NULL)
5282     goto epilogue_done;
5283 
5284 #ifdef HAVE_return
5285   if (optimize && HAVE_return)
5286     {
5287       /* If we're allowed to generate a simple return instruction,
5288 	 then by definition we don't need a full epilogue.  Examine
5289 	 the block that falls through to EXIT.   If it does not
5290 	 contain any code, examine its predecessors and try to
5291 	 emit (conditional) return instructions.  */
5292 
5293       basic_block last;
5294       rtx label;
5295 
5296       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5297 	if (e->flags & EDGE_FALLTHRU)
5298 	  break;
5299       if (e == NULL)
5300 	goto epilogue_done;
5301       last = e->src;
5302 
5303       /* Verify that there are no active instructions in the last block.  */
5304       label = BB_END (last);
5305       while (label && !LABEL_P (label))
5306 	{
5307 	  if (active_insn_p (label))
5308 	    break;
5309 	  label = PREV_INSN (label);
5310 	}
5311 
5312       if (BB_HEAD (last) == label && LABEL_P (label))
5313 	{
5314 	  edge_iterator ei2;
5315 	  rtx epilogue_line_note = NULL_RTX;
5316 
5317 	  /* Locate the line number associated with the closing brace,
5318 	     if we can find one.  */
5319 	  for (seq = get_last_insn ();
5320 	       seq && ! active_insn_p (seq);
5321 	       seq = PREV_INSN (seq))
5322 	    if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5323 	      {
5324 		epilogue_line_note = seq;
5325 		break;
5326 	      }
5327 
5328 	  for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5329 	    {
5330 	      basic_block bb = e->src;
5331 	      rtx jump;
5332 
5333 	      if (bb == ENTRY_BLOCK_PTR)
5334 		{
5335 		  ei_next (&ei2);
5336 		  continue;
5337 		}
5338 
5339 	      jump = BB_END (bb);
5340 	      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5341 		{
5342 		  ei_next (&ei2);
5343 		  continue;
5344 		}
5345 
5346 	      /* If we have an unconditional jump, we can replace that
5347 		 with a simple return instruction.  */
5348 	      if (simplejump_p (jump))
5349 		{
5350 		  emit_return_into_block (bb, epilogue_line_note);
5351 		  delete_insn (jump);
5352 		}
5353 
5354 	      /* If we have a conditional jump, we can try to replace
5355 		 that with a conditional return instruction.  */
5356 	      else if (condjump_p (jump))
5357 		{
5358 		  if (! redirect_jump (jump, 0, 0))
5359 		    {
5360 		      ei_next (&ei2);
5361 		      continue;
5362 		    }
5363 
5364 		  /* If this block has only one successor, it both jumps
5365 		     and falls through to the fallthru block, so we can't
5366 		     delete the edge.  */
5367 		  if (single_succ_p (bb))
5368 		    {
5369 		      ei_next (&ei2);
5370 		      continue;
5371 		    }
5372 		}
5373 	      else
5374 		{
5375 		  ei_next (&ei2);
5376 		  continue;
5377 		}
5378 
5379 	      /* Fix up the CFG for the successful change we just made.  */
5380 	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
5381 	    }
5382 
5383 	  /* Emit a return insn for the exit fallthru block.  Whether
5384 	     this is still reachable will be determined later.  */
5385 
5386 	  emit_barrier_after (BB_END (last));
5387 	  emit_return_into_block (last, epilogue_line_note);
5388 	  epilogue_end = BB_END (last);
5389 	  single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5390 	  goto epilogue_done;
5391 	}
5392     }
5393 #endif
5394   /* Find the edge that falls through to EXIT.  Other edges may exist
5395      due to RETURN instructions, but those don't need epilogues.
5396      There really shouldn't be a mixture -- either all should have
5397      been converted or none, however...  */
5398 
5399   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5400     if (e->flags & EDGE_FALLTHRU)
5401       break;
5402   if (e == NULL)
5403     goto epilogue_done;
5404 
5405 #ifdef HAVE_epilogue
5406   if (HAVE_epilogue)
5407     {
5408       start_sequence ();
5409       epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5410 
5411       seq = gen_epilogue ();
5412 
5413 #ifdef INCOMING_RETURN_ADDR_RTX
5414       /* If this function returns with the stack depressed and we can support
5415 	 it, massage the epilogue to actually do that.  */
5416       if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5417 	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5418 	seq = keep_stack_depressed (seq);
5419 #endif
5420 
5421       emit_jump_insn (seq);
5422 
5423       /* Retain a map of the epilogue insns.  */
5424       record_insns (seq, &epilogue);
5425       set_insn_locators (seq, epilogue_locator);
5426 
5427       seq = get_insns ();
5428       end_sequence ();
5429 
5430       insert_insn_on_edge (seq, e);
5431       inserted = 1;
5432     }
5433   else
5434 #endif
5435     {
5436       basic_block cur_bb;
5437 
5438       if (! next_active_insn (BB_END (e->src)))
5439 	goto epilogue_done;
5440       /* We have a fall-through edge to the exit block, the source is not
5441          at the end of the function, and there will be an assembler epilogue
5442          at the end of the function.
5443          We can't use force_nonfallthru here, because that would try to
5444          use return.  Inserting a jump 'by hand' is extremely messy, so
5445 	 we take advantage of cfg_layout_finalize using
5446 	fixup_fallthru_exit_predecessor.  */
5447       cfg_layout_initialize (0);
5448       FOR_EACH_BB (cur_bb)
5449 	if (cur_bb->index >= NUM_FIXED_BLOCKS
5450 	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5451 	  cur_bb->aux = cur_bb->next_bb;
5452       cfg_layout_finalize ();
5453     }
5454 epilogue_done:
5455 
5456   if (inserted)
5457     commit_edge_insertions ();
5458 
5459 #ifdef HAVE_sibcall_epilogue
5460   /* Emit sibling epilogues before any sibling call sites.  */
5461   for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5462     {
5463       basic_block bb = e->src;
5464       rtx insn = BB_END (bb);
5465 
5466       if (!CALL_P (insn)
5467 	  || ! SIBLING_CALL_P (insn))
5468 	{
5469 	  ei_next (&ei);
5470 	  continue;
5471 	}
5472 
5473       start_sequence ();
5474       emit_insn (gen_sibcall_epilogue ());
5475       seq = get_insns ();
5476       end_sequence ();
5477 
5478       /* Retain a map of the epilogue insns.  Used in life analysis to
5479 	 avoid getting rid of sibcall epilogue insns.  Do this before we
5480 	 actually emit the sequence.  */
5481       record_insns (seq, &sibcall_epilogue);
5482       set_insn_locators (seq, epilogue_locator);
5483 
5484       emit_insn_before (seq, insn);
5485       ei_next (&ei);
5486     }
5487 #endif
5488 
5489 #ifdef HAVE_prologue
5490   /* This is probably all useless now that we use locators.  */
5491   if (prologue_end)
5492     {
5493       rtx insn, prev;
5494 
5495       /* GDB handles `break f' by setting a breakpoint on the first
5496 	 line note after the prologue.  Which means (1) that if
5497 	 there are line number notes before where we inserted the
5498 	 prologue we should move them, and (2) we should generate a
5499 	 note before the end of the first basic block, if there isn't
5500 	 one already there.
5501 
5502 	 ??? This behavior is completely broken when dealing with
5503 	 multiple entry functions.  We simply always place the note
5504 	 into the first basic block and let alternate entry points
5505 	 be missed.
5506        */
5507 
5508       for (insn = prologue_end; insn; insn = prev)
5509 	{
5510 	  prev = PREV_INSN (insn);
5511 	  if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5512 	    {
5513 	      /* Note that we cannot reorder the first insn in the
5514 		 chain, since rest_of_compilation relies on that
5515 		 remaining constant.  */
5516 	      if (prev == NULL)
5517 		break;
5518 	      reorder_insns (insn, insn, prologue_end);
5519 	    }
5520 	}
5521 
5522       /* Find the last line number note in the first block.  */
5523       for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5524 	   insn != prologue_end && insn;
5525 	   insn = PREV_INSN (insn))
5526 	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5527 	  break;
5528 
5529       /* If we didn't find one, make a copy of the first line number
5530 	 we run across.  */
5531       if (! insn)
5532 	{
5533 	  for (insn = next_active_insn (prologue_end);
5534 	       insn;
5535 	       insn = PREV_INSN (insn))
5536 	    if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5537 	      {
5538 		emit_note_copy_after (insn, prologue_end);
5539 		break;
5540 	      }
5541 	}
5542     }
5543 #endif
5544 #ifdef HAVE_epilogue
5545   if (epilogue_end)
5546     {
5547       rtx insn, next;
5548 
5549       /* Similarly, move any line notes that appear after the epilogue.
5550          There is no need, however, to be quite so anal about the existence
5551 	 of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
5552 	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5553 	 info generation.  */
5554       for (insn = epilogue_end; insn; insn = next)
5555 	{
5556 	  next = NEXT_INSN (insn);
5557 	  if (NOTE_P (insn)
5558 	      && (NOTE_LINE_NUMBER (insn) > 0
5559 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5560 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5561 	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5562 	}
5563     }
5564 #endif
5565 }
5566 
5567 /* Reposition the prologue-end and epilogue-begin notes after instruction
5568    scheduling and delayed branch scheduling.  */
5569 
5570 void
5571 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5572 {
5573 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5574   rtx insn, last, note;
5575   int len;
5576 
5577   if ((len = VEC_length (int, prologue)) > 0)
5578     {
5579       last = 0, note = 0;
5580 
5581       /* Scan from the beginning until we reach the last prologue insn.
5582 	 We apparently can't depend on basic_block_{head,end} after
5583 	 reorg has run.  */
5584       for (insn = f; insn; insn = NEXT_INSN (insn))
5585 	{
5586 	  if (NOTE_P (insn))
5587 	    {
5588 	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5589 		note = insn;
5590 	    }
5591 	  else if (contains (insn, &prologue))
5592 	    {
5593 	      last = insn;
5594 	      if (--len == 0)
5595 		break;
5596 	    }
5597 	}
5598 
5599       if (last)
5600 	{
5601 	  /* Find the prologue-end note if we haven't already, and
5602 	     move it to just after the last prologue insn.  */
5603 	  if (note == 0)
5604 	    {
5605 	      for (note = last; (note = NEXT_INSN (note));)
5606 		if (NOTE_P (note)
5607 		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5608 		  break;
5609 	    }
5610 
5611 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
5612 	  if (LABEL_P (last))
5613 	    last = NEXT_INSN (last);
5614 	  reorder_insns (note, note, last);
5615 	}
5616     }
5617 
5618   if ((len = VEC_length (int, epilogue)) > 0)
5619     {
5620       last = 0, note = 0;
5621 
5622       /* Scan from the end until we reach the first epilogue insn.
5623 	 We apparently can't depend on basic_block_{head,end} after
5624 	 reorg has run.  */
5625       for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5626 	{
5627 	  if (NOTE_P (insn))
5628 	    {
5629 	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5630 		note = insn;
5631 	    }
5632 	  else if (contains (insn, &epilogue))
5633 	    {
5634 	      last = insn;
5635 	      if (--len == 0)
5636 		break;
5637 	    }
5638 	}
5639 
5640       if (last)
5641 	{
5642 	  /* Find the epilogue-begin note if we haven't already, and
5643 	     move it to just before the first epilogue insn.  */
5644 	  if (note == 0)
5645 	    {
5646 	      for (note = insn; (note = PREV_INSN (note));)
5647 		if (NOTE_P (note)
5648 		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5649 		  break;
5650 	    }
5651 
5652 	  if (PREV_INSN (last) != note)
5653 	    reorder_insns (note, note, PREV_INSN (last));
5654 	}
5655     }
5656 #endif /* HAVE_prologue or HAVE_epilogue */
5657 }
5658 
5659 /* Reset the cfun->ib_boundaries_block vector.  */
5660 
5661 void
5662 reset_block_changes (void)
5663 {
5664   cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
5665   VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
5666 }
5667 
5668 /* Record the boundary for BLOCK.  */
5669 void
5670 record_block_change (tree block)
5671 {
5672   int i, n;
5673   tree last_block;
5674 
5675   if (!block)
5676     return;
5677 
5678   if (!cfun->ib_boundaries_block)
5679     return;
5680 
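  /* Every insn emitted since the previous boundary belongs to the block that
     was recorded last; extend the vector accordingly, then record BLOCK for
     the insns that follow.  */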
5681   last_block = VEC_pop (tree, cfun->ib_boundaries_block);
5682   n = get_max_uid ();
5683   for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
5684     VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
5685 
5686   VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
5687 }
5688 
5689 /* Finish recording block boundaries.  */
5690 void
5691 finalize_block_changes (void)
5692 {
5693   record_block_change (DECL_INITIAL (current_function_decl));
5694 }
5695 
5696 /* For INSN, store in *BLOCK the block it belongs to, if one was recorded.  */
5697 void
5698 check_block_change (rtx insn, tree *block)
5699 {
5700   unsigned uid = INSN_UID (insn);
5701 
5702   if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
5703     return;
5704 
5705   *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
5706 }
5707 
5708 /* Releases the ib_boundaries_block records.  */
5709 void
5710 free_block_changes (void)
5711 {
5712   VEC_free (tree, gc, cfun->ib_boundaries_block);
5713 }
5714 
5715 /* Returns the name of the current function.  */
5716 const char *
5717 current_function_name (void)
5718 {
5719   return lang_hooks.decl_printable_name (cfun->decl, 2);
5720 }
5721 
5722 
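/* Record whether the current function uses only leaf registers; this is
   meaningful only on targets that define LEAF_REGISTERS.  */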
5723 static unsigned int
5724 rest_of_handle_check_leaf_regs (void)
5725 {
5726 #ifdef LEAF_REGISTERS
5727   current_function_uses_only_leaf_regs
5728     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5729 #endif
5730   return 0;
5731 }
5732 
5733 /* Insert a TYPE into the used types hash table of CFUN.  */
5734 static void
5735 used_types_insert_helper (tree type, struct function *func)
5736 {
5737   if (type != NULL && func != NULL)
5738     {
5739       void **slot;
5740 
5741       if (func->used_types_hash == NULL)
5742 	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5743 						 htab_eq_pointer, NULL);
5744       slot = htab_find_slot (func->used_types_hash, type, INSERT);
5745       if (*slot == NULL)
5746 	*slot = type;
5747     }
5748 }
5749 
5750 /* Given a type, insert it into the used hash table in cfun.  */
5751 void
5752 used_types_insert (tree t)
5753 {
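  /* Strip pointer and array wrappers and canonicalize to the main variant
     of the underlying type before recording it.  */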
5754   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5755     t = TREE_TYPE (t);
5756   t = TYPE_MAIN_VARIANT (t);
5757   if (debug_info_level > DINFO_LEVEL_NONE)
5758     used_types_insert_helper (t, cfun);
5759 }
5760 
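/* Pass descriptor for the leaf-register check; its execute hook is
   rest_of_handle_check_leaf_regs above.  */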
5761 struct tree_opt_pass pass_leaf_regs =
5762 {
5763   NULL,                                 /* name */
5764   NULL,                                 /* gate */
5765   rest_of_handle_check_leaf_regs,       /* execute */
5766   NULL,                                 /* sub */
5767   NULL,                                 /* next */
5768   0,                                    /* static_pass_number */
5769   0,                                    /* tv_id */
5770   0,                                    /* properties_required */
5771   0,                                    /* properties_provided */
5772   0,                                    /* properties_destroyed */
5773   0,                                    /* todo_flags_start */
5774   0,                                    /* todo_flags_finish */
5775   0                                     /* letter */
5776 };
5777 
5778 
5779 #include "gt-function.h"
5780