xref: /freebsd-12.1/contrib/gcc/cse.c (revision c6e21058)
1 /* Common subexpression elimination for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3    1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA.  */
21 
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS.  */
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "regs.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "function.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "output.h"
40 #include "ggc.h"
41 #include "timevar.h"
42 #include "except.h"
43 #include "target.h"
44 #include "params.h"
45 #include "rtlhooks-def.h"
46 #include "tree-pass.h"
47 
48 /* The basic idea of common subexpression elimination is to go
49    through the code, keeping a record of expressions that would
50    have the same value at the current scan point, and replacing
51    expressions encountered with the cheapest equivalent expression.
52 
53    It is too complicated to keep track of the different possibilities
54    when control paths merge in this code; so, at each label, we forget all
55    that is known and start fresh.  This can be described as processing each
56    extended basic block separately.  We have a separate pass to perform
57    global CSE.
58 
59    Note CSE can turn a conditional or computed jump into a nop or
60    an unconditional jump.  When this occurs we arrange to run the jump
61    optimizer after CSE to delete the unreachable code.
62 
63    We use two data structures to record the equivalent expressions:
64    a hash table for most expressions, and a vector of "quantity
65    numbers" to record equivalent (pseudo) registers.
66 
67    The use of the special data structure for registers is desirable
68    because it is faster.  It is possible because registers references
69    contain a fairly small number, the register number, taken from
70    a contiguously allocated series, and two register references are
71    identical if they have the same number.  General expressions
72    do not have any such thing, so the only way to retrieve the
73    information recorded on an expression other than a register
74    is to keep it in a hash table.
75 
76 Registers and "quantity numbers":
77 
78    At the start of each basic block, all of the (hardware and pseudo)
79    registers used in the function are given distinct quantity
80    numbers to indicate their contents.  During scan, when the code
81    copies one register into another, we copy the quantity number.
82    When a register is loaded in any other way, we allocate a new
83    quantity number to describe the value generated by this operation.
84    `REG_QTY (N)' records what quantity register N is currently thought
85    of as containing.
86 
87    All real quantity numbers are greater than or equal to zero.
88    If register N has not been assigned a quantity, `REG_QTY (N)' will
89    equal -N - 1, which is always negative.
90 
91    Quantity numbers below zero do not exist and none of the `qty_table'
92    entries should be referenced with a negative index.
93 
94    We also maintain a bidirectional chain of registers for each
95    quantity number.  The `qty_table` members `first_reg' and `last_reg',
96    and `reg_eqv_table' members `next' and `prev' hold these chains.
97 
98    The first register in a chain is the one whose lifespan is least local.
99    Among equals, it is the one that was seen first.
100    We replace any equivalent register with that one.
101 
102    If two registers have the same quantity number, it must be true that
103    REG expressions with qty_table `mode' must be in the hash table for both
104    registers and must be in the same class.
105 
106    The converse is not true.  Since hard registers may be referenced in
107    any mode, two REG expressions might be equivalent in the hash table
108    but not have the same quantity number if the quantity number of one
109    of the registers is not the same mode as those expressions.
110 
111 Constants and quantity numbers
112 
113    When a quantity has a known constant value, that value is stored
114    in the appropriate qty_table `const_rtx'.  This is in addition to
115    putting the constant in the hash table as is usual for non-regs.
116 
117    Whether a reg or a constant is preferred is determined by the configuration
118    macro CONST_COSTS and will often depend on the constant value.  In any
119    event, expressions containing constants can be simplified, by fold_rtx.
120 
121    When a quantity has a known nearly constant value (such as an address
122    of a stack slot), that value is stored in the appropriate qty_table
123    `const_rtx'.
124 
125    Integer constants don't have a machine mode.  However, cse
126    determines the intended machine mode from the destination
127    of the instruction that moves the constant.  The machine mode
128    is recorded in the hash table along with the actual RTL
129    constant expression so that different modes are kept separate.
130 
131 Other expressions:
132 
133    To record known equivalences among expressions in general
134    we use a hash table called `table'.  It has a fixed number of buckets
135    that contain chains of `struct table_elt' elements for expressions.
136    These chains connect the elements whose expressions have the same
137    hash codes.
138 
139    Other chains through the same elements connect the elements which
140    currently have equivalent values.
141 
142    Register references in an expression are canonicalized before hashing
143    the expression.  This is done using `reg_qty' and qty_table `first_reg'.
144    The hash code of a register reference is computed using the quantity
145    number, not the register number.
146 
147    When the value of an expression changes, it is necessary to remove from the
148    hash table not just that expression but all expressions whose values
149    could be different as a result.
150 
151      1. If the value changing is in memory, except in special cases
152      ANYTHING referring to memory could be changed.  That is because
153      nobody knows where a pointer does not point.
154      The function `invalidate_memory' removes what is necessary.
155 
156      The special cases are when the address is constant or is
157      a constant plus a fixed register such as the frame pointer
158      or a static chain pointer.  When such addresses are stored in,
159      we can tell exactly which other such addresses must be invalidated
160      due to overlap.  `invalidate' does this.
161      All expressions that refer to non-constant
162      memory addresses are also invalidated.  `invalidate_memory' does this.
163 
164      2. If the value changing is a register, all expressions
165      containing references to that register, and only those,
166      must be removed.
167 
168    Because searching the entire hash table for expressions that contain
169    a register is very slow, we try to figure out when it isn't necessary.
170    Precisely, this is necessary only when expressions have been
171    entered in the hash table using this register, and then the value has
172    changed, and then another expression wants to be added to refer to
173    the register's new value.  This sequence of circumstances is rare
174    within any one basic block.
175 
176    `REG_TICK' and `REG_IN_TABLE', accessors for members of
177    cse_reg_info, are used to detect this case.  REG_TICK (i) is
178    incremented whenever a value is stored in register i.
179    REG_IN_TABLE (i) holds -1 if no references to register i have been
180    entered in the table; otherwise, it contains the value REG_TICK (i)
181    had when the references were entered.  If we want to enter a
182    reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
183    remove old references.  Until we want to enter a new entry, the
184    mere fact that the two vectors don't match makes the entries be
185    ignored if anyone tries to match them.
186 
187    Registers themselves are entered in the hash table as well as in
188    the equivalent-register chains.  However, `REG_TICK' and
189    `REG_IN_TABLE' do not apply to expressions which are simple
190    register references.  These expressions are removed from the table
191    immediately when they become invalid, and this can be done even if
192    we do not immediately search for all the expressions that refer to
193    the register.
194 
195    A CLOBBER rtx in an instruction invalidates its operand for further
196    reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
197    invalidates everything that resides in memory.
198 
199 Related expressions:
200 
201    Constant expressions that differ only by an additive integer
202    are called related.  When a constant expression is put in
203    the table, the related expression with no constant term
204    is also entered.  These are made to point at each other
205    so that it is possible to find out if there exists any
206    register equivalent to an expression related to a given expression.  */
207 
208 /* Length of qty_table vector.  We know in advance we will not need
209    a quantity number this big.  */
210 
211 static int max_qty;
212 
213 /* Next quantity number to be allocated.
214    This is 1 + the largest number needed so far.  */
215 
216 static int next_qty;
217 
218 /* Per-qty information tracking.
219 
220    `first_reg' and `last_reg' track the head and tail of the
221    chain of registers which currently contain this quantity.
222 
223    `mode' contains the machine mode of this quantity.
224 
225    `const_rtx' holds the rtx of the constant value of this
226    quantity, if known.  A summations of the frame/arg pointer
227    and a constant can also be entered here.  When this holds
228    a known value, `const_insn' is the insn which stored the
229    constant value.
230 
231    `comparison_{code,const,qty}' are used to track when a
232    comparison between a quantity and some constant or register has
233    been passed.  In such a case, we know the results of the comparison
234    in case we see it again.  These members record a comparison that
235    is known to be true.  `comparison_code' holds the rtx code of such
236    a comparison, else it is set to UNKNOWN and the other two
237    comparison members are undefined.  `comparison_const' holds
238    the constant being compared against, or zero if the comparison
239    is not against a constant.  `comparison_qty' holds the quantity
240    being compared against when the result is known.  If the comparison
241    is not with a register, `comparison_qty' is -1.  */
242 
243 struct qty_table_elem
244 {
245   rtx const_rtx;
246   rtx const_insn;
247   rtx comparison_const;
248   int comparison_qty;
249   unsigned int first_reg, last_reg;
250   /* The sizes of these fields should match the sizes of the
251      code and mode fields of struct rtx_def (see rtl.h).  */
252   ENUM_BITFIELD(rtx_code) comparison_code : 16;
253   ENUM_BITFIELD(machine_mode) mode : 8;
254 };
255 
256 /* The table of all qtys, indexed by qty number.  */
257 static struct qty_table_elem *qty_table;
258 
259 /* Structure used to pass arguments via for_each_rtx to function
260    cse_change_cc_mode.  */
261 struct change_cc_mode_args
262 {
263   rtx insn;
264   rtx newreg;
265 };
266 
267 #ifdef HAVE_cc0
268 /* For machines that have a CC0, we do not record its value in the hash
269    table since its use is guaranteed to be the insn immediately following
270    its definition and any other insn is presumed to invalidate it.
271 
272    Instead, we store below the value last assigned to CC0.  If it should
273    happen to be a constant, it is stored in preference to the actual
274    assigned value.  In case it is a constant, we store the mode in which
275    the constant should be interpreted.  */
276 
277 static rtx prev_insn_cc0;
278 static enum machine_mode prev_insn_cc0_mode;
279 
280 /* Previous actual insn.  0 if at first insn of basic block.  */
281 
282 static rtx prev_insn;
283 #endif
284 
285 /* Insn being scanned.  */
286 
287 static rtx this_insn;
288 
289 /* Index by register number, gives the number of the next (or
290    previous) register in the chain of registers sharing the same
291    value.
292 
293    Or -1 if this register is at the end of the chain.
294 
295    If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */
296 
297 /* Per-register equivalence chain.  */
298 struct reg_eqv_elem
299 {
300   int next, prev;
301 };
302 
303 /* The table of all register equivalence chains.  */
304 static struct reg_eqv_elem *reg_eqv_table;
305 
306 struct cse_reg_info
307 {
308   /* The timestamp at which this register is initialized.  */
309   unsigned int timestamp;
310 
311   /* The quantity number of the register's current contents.  */
312   int reg_qty;
313 
314   /* The number of times the register has been altered in the current
315      basic block.  */
316   int reg_tick;
317 
318   /* The REG_TICK value at which rtx's containing this register are
319      valid in the hash table.  If this does not equal the current
320      reg_tick value, such expressions existing in the hash table are
321      invalid.  */
322   int reg_in_table;
323 
324   /* The SUBREG that was set when REG_TICK was last incremented.  Set
325      to -1 if the last store was to the whole register, not a subreg.  */
326   unsigned int subreg_ticked;
327 };
328 
329 /* A table of cse_reg_info indexed by register numbers.  */
330 static struct cse_reg_info *cse_reg_info_table;
331 
332 /* The size of the above table.  */
333 static unsigned int cse_reg_info_table_size;
334 
335 /* The index of the first entry that has not been initialized.  */
336 static unsigned int cse_reg_info_table_first_uninitialized;
337 
338 /* The timestamp at the beginning of the current run of
339    cse_basic_block.  We increment this variable at the beginning of
340    the current run of cse_basic_block.  The timestamp field of a
341    cse_reg_info entry matches the value of this variable if and only
342    if the entry has been initialized during the current run of
343    cse_basic_block.  */
344 static unsigned int cse_reg_info_timestamp;
345 
346 /* A HARD_REG_SET containing all the hard registers for which there is
347    currently a REG expression in the hash table.  Note the difference
348    from the above variables, which indicate if the REG is mentioned in some
349    expression in the table.  */
350 
351 static HARD_REG_SET hard_regs_in_table;
352 
353 /* CUID of insn that starts the basic block currently being cse-processed.  */
354 
355 static int cse_basic_block_start;
356 
357 /* CUID of insn that ends the basic block currently being cse-processed.  */
358 
359 static int cse_basic_block_end;
360 
361 /* Vector mapping INSN_UIDs to cuids.
362    The cuids are like uids but increase monotonically always.
363    We use them to see whether a reg is used outside a given basic block.  */
364 
365 static int *uid_cuid;
366 
367 /* Highest UID in UID_CUID.  */
368 static int max_uid;
369 
370 /* Get the cuid of an insn.  */
371 
372 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
373 
374 /* Nonzero if this pass has made changes, and therefore it's
375    worthwhile to run the garbage collector.  */
376 
377 static int cse_altered;
378 
379 /* Nonzero if cse has altered conditional jump insns
380    in such a way that jump optimization should be redone.  */
381 
382 static int cse_jumps_altered;
383 
384 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
385    REG_LABEL, we have to rerun jump after CSE to put in the note.  */
386 static int recorded_label_ref;
387 
388 /* canon_hash stores 1 in do_not_record
389    if it notices a reference to CC0, PC, or some other volatile
390    subexpression.  */
391 
392 static int do_not_record;
393 
394 /* canon_hash stores 1 in hash_arg_in_memory
395    if it notices a reference to memory within the expression being hashed.  */
396 
397 static int hash_arg_in_memory;
398 
399 /* The hash table contains buckets which are chains of `struct table_elt's,
400    each recording one expression's information.
401    That expression is in the `exp' field.
402 
403    The canon_exp field contains a canonical (from the point of view of
404    alias analysis) version of the `exp' field.
405 
406    Those elements with the same hash code are chained in both directions
407    through the `next_same_hash' and `prev_same_hash' fields.
408 
409    Each set of expressions with equivalent values
410    are on a two-way chain through the `next_same_value'
411    and `prev_same_value' fields, and all point with
412    the `first_same_value' field at the first element in
413    that chain.  The chain is in order of increasing cost.
414    Each element's cost value is in its `cost' field.
415 
416    The `in_memory' field is nonzero for elements that
417    involve any reference to memory.  These elements are removed
418    whenever a write is done to an unidentified location in memory.
419    To be safe, we assume that a memory address is unidentified unless
420    the address is either a symbol constant or a constant plus
421    the frame pointer or argument pointer.
422 
423    The `related_value' field is used to connect related expressions
424    (that differ by adding an integer).
425    The related expressions are chained in a circular fashion.
426    `related_value' is zero for expressions for which this
427    chain is not useful.
428 
429    The `cost' field stores the cost of this element's expression.
430    The `regcost' field stores the value returned by approx_reg_cost for
431    this element's expression.
432 
433    The `is_const' flag is set if the element is a constant (including
434    a fixed address).
435 
436    The `flag' field is used as a temporary during some search routines.
437 
438    The `mode' field is usually the same as GET_MODE (`exp'), but
439    if `exp' is a CONST_INT and has no machine mode then the `mode'
440    field is the mode it was being used as.  Each constant is
441    recorded separately for each mode it is used with.  */
442 
443 struct table_elt
444 {
445   rtx exp;
446   rtx canon_exp;
447   struct table_elt *next_same_hash;
448   struct table_elt *prev_same_hash;
449   struct table_elt *next_same_value;
450   struct table_elt *prev_same_value;
451   struct table_elt *first_same_value;
452   struct table_elt *related_value;
453   int cost;
454   int regcost;
455   /* The size of this field should match the size
456      of the mode field of struct rtx_def (see rtl.h).  */
457   ENUM_BITFIELD(machine_mode) mode : 8;
458   char in_memory;
459   char is_const;
460   char flag;
461 };
462 
463 /* We don't want a lot of buckets, because we rarely have very many
464    things stored in the hash table, and a lot of buckets slows
465    down a lot of loops that happen frequently.  */
466 #define HASH_SHIFT	5
467 #define HASH_SIZE	(1 << HASH_SHIFT)
468 #define HASH_MASK	(HASH_SIZE - 1)
469 
470 /* Compute hash code of X in mode M.  Special-case case where X is a pseudo
471    register (hard registers may require `do_not_record' to be set).  */
472 
473 #define HASH(X, M)	\
474  ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
475   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
476   : canon_hash (X, M)) & HASH_MASK)
477 
478 /* Like HASH, but without side-effects.  */
479 #define SAFE_HASH(X, M)	\
480  ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
481   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
482   : safe_hash (X, M)) & HASH_MASK)
483 
484 /* Determine whether register number N is considered a fixed register for the
485    purpose of approximating register costs.
486    It is desirable to replace other regs with fixed regs, to reduce need for
487    non-fixed hard regs.
488    A reg wins if it is either the frame pointer or designated as fixed.  */
489 #define FIXED_REGNO_P(N)  \
490   ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
491    || fixed_regs[N] || global_regs[N])
492 
493 /* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
494    hard registers and pointers into the frame are the cheapest with a cost
495    of 0.  Next come pseudos with a cost of one and other hard registers with
496    a cost of 2.  Aside from these special cases, call `rtx_cost'.  */
497 
498 #define CHEAP_REGNO(N)							\
499   (REGNO_PTR_FRAME_P(N)							\
500    || (HARD_REGISTER_NUM_P (N)						\
501        && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
502 
503 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
504 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
505 
506 /* Get the number of times this register has been updated in this
507    basic block.  */
508 
509 #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
510 
511 /* Get the point at which REG was recorded in the table.  */
512 
513 #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
514 
515 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
516    SUBREG).  */
517 
518 #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
519 
520 /* Get the quantity number for REG.  */
521 
522 #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
523 
524 /* Determine if the quantity number for register X represents a valid index
525    into the qty_table.  */
526 
527 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
528 
529 static struct table_elt *table[HASH_SIZE];
530 
531 /* Number of elements in the hash table.  */
532 
533 static unsigned int table_size;
534 
535 /* Chain of `struct table_elt's made so far for this function
536    but currently removed from the table.  */
537 
538 static struct table_elt *free_element_chain;
539 
540 /* Set to the cost of a constant pool reference if one was found for a
541    symbolic constant.  If this was found, it means we should try to
542    convert constants into constant pool entries if they don't fit in
543    the insn.  */
544 
545 static int constant_pool_entries_cost;
546 static int constant_pool_entries_regcost;
547 
548 /* This data describes a block that will be processed by cse_basic_block.  */
549 
550 struct cse_basic_block_data
551 {
552   /* Lowest CUID value of insns in block.  */
553   int low_cuid;
554   /* Highest CUID value of insns in block.  */
555   int high_cuid;
556   /* Total number of SETs in block.  */
557   int nsets;
558   /* Last insn in the block.  */
559   rtx last;
560   /* Size of current branch path, if any.  */
561   int path_size;
562   /* Current branch path, indicating which branches will be taken.  */
563   struct branch_path
564     {
565       /* The branch insn.  */
566       rtx branch;
567       /* Whether it should be taken or not.  AROUND is the same as taken
568 	 except that it is used when the destination label is not preceded
569        by a BARRIER.  */
570       enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
571     } *path;
572 };
573 
574 static bool fixed_base_plus_p (rtx x);
575 static int notreg_cost (rtx, enum rtx_code);
576 static int approx_reg_cost_1 (rtx *, void *);
577 static int approx_reg_cost (rtx);
578 static int preferable (int, int, int, int);
579 static void new_basic_block (void);
580 static void make_new_qty (unsigned int, enum machine_mode);
581 static void make_regs_eqv (unsigned int, unsigned int);
582 static void delete_reg_equiv (unsigned int);
583 static int mention_regs (rtx);
584 static int insert_regs (rtx, struct table_elt *, int);
585 static void remove_from_table (struct table_elt *, unsigned);
586 static void remove_pseudo_from_table (rtx, unsigned);
587 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
588 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
589 static rtx lookup_as_function (rtx, enum rtx_code);
590 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
591 				 enum machine_mode);
592 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
593 static void invalidate (rtx, enum machine_mode);
594 static int cse_rtx_varies_p (rtx, int);
595 static void remove_invalid_refs (unsigned int);
596 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
597 					enum machine_mode);
598 static void rehash_using_reg (rtx);
599 static void invalidate_memory (void);
600 static void invalidate_for_call (void);
601 static rtx use_related_value (rtx, struct table_elt *);
602 
603 static inline unsigned canon_hash (rtx, enum machine_mode);
604 static inline unsigned safe_hash (rtx, enum machine_mode);
605 static unsigned hash_rtx_string (const char *);
606 
607 static rtx canon_reg (rtx, rtx);
608 static void find_best_addr (rtx, rtx *, enum machine_mode);
609 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
610 					   enum machine_mode *,
611 					   enum machine_mode *);
612 static rtx fold_rtx (rtx, rtx);
613 static rtx equiv_constant (rtx);
614 static void record_jump_equiv (rtx, int);
615 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
616 			      int);
617 static void cse_insn (rtx, rtx);
618 static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
619 				    int, int);
620 static int addr_affects_sp_p (rtx);
621 static void invalidate_from_clobbers (rtx);
622 static rtx cse_process_notes (rtx, rtx);
623 static void invalidate_skipped_set (rtx, rtx, void *);
624 static void invalidate_skipped_block (rtx);
625 static rtx cse_basic_block (rtx, rtx, struct branch_path *);
626 static void count_reg_usage (rtx, int *, rtx, int);
627 static int check_for_label_ref (rtx *, void *);
628 extern void dump_class (struct table_elt*);
629 static void get_cse_reg_info_1 (unsigned int regno);
630 static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
631 static int check_dependence (rtx *, void *);
632 
633 static void flush_hash_table (void);
634 static bool insn_live_p (rtx, int *);
635 static bool set_live_p (rtx, rtx, int *);
636 static bool dead_libcall_p (rtx, int *);
637 static int cse_change_cc_mode (rtx *, void *);
638 static void cse_change_cc_mode_insn (rtx, rtx);
639 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
640 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
641 
642 
643 #undef RTL_HOOKS_GEN_LOWPART
644 #define RTL_HOOKS_GEN_LOWPART		gen_lowpart_if_possible
645 
646 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
647 
648 /* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
649    virtual regs here because the simplify_*_operation routines are called
650    by integrate.c, which is called before virtual register instantiation.  */
651 
652 static bool
fixed_base_plus_p(rtx x)653 fixed_base_plus_p (rtx x)
654 {
655   switch (GET_CODE (x))
656     {
657     case REG:
658       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
659 	return true;
660       if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
661 	return true;
662       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
663 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
664 	return true;
665       return false;
666 
667     case PLUS:
668       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
669 	return false;
670       return fixed_base_plus_p (XEXP (x, 0));
671 
672     default:
673       return false;
674     }
675 }
676 
677 /* Dump the expressions in the equivalence class indicated by CLASSP.
678    This function is used only for debugging.  */
679 void
dump_class(struct table_elt * classp)680 dump_class (struct table_elt *classp)
681 {
682   struct table_elt *elt;
683 
684   fprintf (stderr, "Equivalence chain for ");
685   print_rtl (stderr, classp->exp);
686   fprintf (stderr, ": \n");
687 
688   for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
689     {
690       print_rtl (stderr, elt->exp);
691       fprintf (stderr, "\n");
692     }
693 }
694 
695 /* Subroutine of approx_reg_cost; called through for_each_rtx.  */
696 
697 static int
approx_reg_cost_1(rtx * xp,void * data)698 approx_reg_cost_1 (rtx *xp, void *data)
699 {
700   rtx x = *xp;
701   int *cost_p = data;
702 
703   if (x && REG_P (x))
704     {
705       unsigned int regno = REGNO (x);
706 
707       if (! CHEAP_REGNO (regno))
708 	{
709 	  if (regno < FIRST_PSEUDO_REGISTER)
710 	    {
711 	      if (SMALL_REGISTER_CLASSES)
712 		return 1;
713 	      *cost_p += 2;
714 	    }
715 	  else
716 	    *cost_p += 1;
717 	}
718     }
719 
720   return 0;
721 }
722 
723 /* Return an estimate of the cost of the registers used in an rtx.
724    This is mostly the number of different REG expressions in the rtx;
725    however for some exceptions like fixed registers we use a cost of
726    0.  If any other hard register reference occurs, return MAX_COST.  */
727 
728 static int
approx_reg_cost(rtx x)729 approx_reg_cost (rtx x)
730 {
731   int cost = 0;
732 
733   if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
734     return MAX_COST;
735 
736   return cost;
737 }
738 
739 /* Returns a canonical version of X for the address, from the point of view,
740    that all multiplications are represented as MULT instead of the multiply
741    by a power of 2 being represented as ASHIFT.  */
742 
743 static rtx
canon_for_address(rtx x)744 canon_for_address (rtx x)
745 {
746   enum rtx_code code;
747   enum machine_mode mode;
748   rtx new = 0;
749   int i;
750   const char *fmt;
751 
752   if (!x)
753     return x;
754 
755   code = GET_CODE (x);
756   mode = GET_MODE (x);
757 
758   switch (code)
759     {
760     case ASHIFT:
761       if (GET_CODE (XEXP (x, 1)) == CONST_INT
762 	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
763 	  && INTVAL (XEXP (x, 1)) >= 0)
764         {
765 	  new = canon_for_address (XEXP (x, 0));
766 	  new = gen_rtx_MULT (mode, new,
767 			      gen_int_mode ((HOST_WIDE_INT) 1
768 				            << INTVAL (XEXP (x, 1)),
769 					    mode));
770 	}
771       break;
772     default:
773       break;
774 
775     }
776   if (new)
777     return new;
778 
779   /* Now recursively process each operand of this operation.  */
780   fmt = GET_RTX_FORMAT (code);
781   for (i = 0; i < GET_RTX_LENGTH (code); i++)
782     if (fmt[i] == 'e')
783       {
784 	new = canon_for_address (XEXP (x, i));
785 	XEXP (x, i) = new;
786       }
787   return x;
788 }
789 
790 /* Return a negative value if an rtx A, whose costs are given by COST_A
791    and REGCOST_A, is more desirable than an rtx B.
792    Return a positive value if A is less desirable, or 0 if the two are
793    equally good.  */
794 static int
preferable(int cost_a,int regcost_a,int cost_b,int regcost_b)795 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
796 {
797   /* First, get rid of cases involving expressions that are entirely
798      unwanted.  */
799   if (cost_a != cost_b)
800     {
801       if (cost_a == MAX_COST)
802 	return 1;
803       if (cost_b == MAX_COST)
804 	return -1;
805     }
806 
807   /* Avoid extending lifetimes of hardregs.  */
808   if (regcost_a != regcost_b)
809     {
810       if (regcost_a == MAX_COST)
811 	return 1;
812       if (regcost_b == MAX_COST)
813 	return -1;
814     }
815 
816   /* Normal operation costs take precedence.  */
817   if (cost_a != cost_b)
818     return cost_a - cost_b;
819   /* Only if these are identical consider effects on register pressure.  */
820   if (regcost_a != regcost_b)
821     return regcost_a - regcost_b;
822   return 0;
823 }
824 
825 /* Internal function, to compute cost when X is not a register; called
826    from COST macro to keep it simple.  */
827 
828 static int
notreg_cost(rtx x,enum rtx_code outer)829 notreg_cost (rtx x, enum rtx_code outer)
830 {
831   return ((GET_CODE (x) == SUBREG
832 	   && REG_P (SUBREG_REG (x))
833 	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
834 	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
835 	   && (GET_MODE_SIZE (GET_MODE (x))
836 	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
837 	   && subreg_lowpart_p (x)
838 	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
839 				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
840 	  ? 0
841 	  : rtx_cost (x, outer) * 2);
842 }
843 
844 
845 /* Initialize CSE_REG_INFO_TABLE.  */
846 
847 static void
init_cse_reg_info(unsigned int nregs)848 init_cse_reg_info (unsigned int nregs)
849 {
850   /* Do we need to grow the table?  */
851   if (nregs > cse_reg_info_table_size)
852     {
853       unsigned int new_size;
854 
855       if (cse_reg_info_table_size < 2048)
856 	{
857 	  /* Compute a new size that is a power of 2 and no smaller
858 	     than the large of NREGS and 64.  */
859 	  new_size = (cse_reg_info_table_size
860 		      ? cse_reg_info_table_size : 64);
861 
862 	  while (new_size < nregs)
863 	    new_size *= 2;
864 	}
865       else
866 	{
867 	  /* If we need a big table, allocate just enough to hold
868 	     NREGS registers.  */
869 	  new_size = nregs;
870 	}
871 
872       /* Reallocate the table with NEW_SIZE entries.  */
873       if (cse_reg_info_table)
874 	free (cse_reg_info_table);
875       cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
876       cse_reg_info_table_size = new_size;
877       cse_reg_info_table_first_uninitialized = 0;
878     }
879 
880   /* Do we have all of the first NREGS entries initialized?  */
881   if (cse_reg_info_table_first_uninitialized < nregs)
882     {
883       unsigned int old_timestamp = cse_reg_info_timestamp - 1;
884       unsigned int i;
885 
886       /* Put the old timestamp on newly allocated entries so that they
887 	 will all be considered out of date.  We do not touch those
888 	 entries beyond the first NREGS entries to be nice to the
889 	 virtual memory.  */
890       for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
891 	cse_reg_info_table[i].timestamp = old_timestamp;
892 
893       cse_reg_info_table_first_uninitialized = nregs;
894     }
895 }
896 
897 /* Given REGNO, initialize the cse_reg_info entry for REGNO.  */
898 
899 static void
get_cse_reg_info_1(unsigned int regno)900 get_cse_reg_info_1 (unsigned int regno)
901 {
902   /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
903      entry will be considered to have been initialized.  */
904   cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
905 
906   /* Initialize the rest of the entry.  */
907   cse_reg_info_table[regno].reg_tick = 1;
908   cse_reg_info_table[regno].reg_in_table = -1;
909   cse_reg_info_table[regno].subreg_ticked = -1;
910   cse_reg_info_table[regno].reg_qty = -regno - 1;
911 }
912 
913 /* Find a cse_reg_info entry for REGNO.  */
914 
915 static inline struct cse_reg_info *
get_cse_reg_info(unsigned int regno)916 get_cse_reg_info (unsigned int regno)
917 {
918   struct cse_reg_info *p = &cse_reg_info_table[regno];
919 
920   /* If this entry has not been initialized, go ahead and initialize
921      it.  */
922   if (p->timestamp != cse_reg_info_timestamp)
923     get_cse_reg_info_1 (regno);
924 
925   return p;
926 }
927 
928 /* Clear the hash table and initialize each register with its own quantity,
929    for a new basic block.  */
930 
931 static void
new_basic_block(void)932 new_basic_block (void)
933 {
934   int i;
935 
936   next_qty = 0;
937 
938   /* Invalidate cse_reg_info_table.  */
939   cse_reg_info_timestamp++;
940 
941   /* Clear out hash table state for this pass.  */
942   CLEAR_HARD_REG_SET (hard_regs_in_table);
943 
944   /* The per-quantity values used to be initialized here, but it is
945      much faster to initialize each as it is made in `make_new_qty'.  */
946 
947   for (i = 0; i < HASH_SIZE; i++)
948     {
949       struct table_elt *first;
950 
951       first = table[i];
952       if (first != NULL)
953 	{
954 	  struct table_elt *last = first;
955 
956 	  table[i] = NULL;
957 
958 	  while (last->next_same_hash != NULL)
959 	    last = last->next_same_hash;
960 
961 	  /* Now relink this hash entire chain into
962 	     the free element list.  */
963 
964 	  last->next_same_hash = free_element_chain;
965 	  free_element_chain = first;
966 	}
967     }
968 
969   table_size = 0;
970 
971 #ifdef HAVE_cc0
972   prev_insn = 0;
973   prev_insn_cc0 = 0;
974 #endif
975 }
976 
977 /* Say that register REG contains a quantity in mode MODE not in any
978    register before and initialize that quantity.  */
979 
980 static void
make_new_qty(unsigned int reg,enum machine_mode mode)981 make_new_qty (unsigned int reg, enum machine_mode mode)
982 {
983   int q;
984   struct qty_table_elem *ent;
985   struct reg_eqv_elem *eqv;
986 
987   gcc_assert (next_qty < max_qty);
988 
989   q = REG_QTY (reg) = next_qty++;
990   ent = &qty_table[q];
991   ent->first_reg = reg;
992   ent->last_reg = reg;
993   ent->mode = mode;
994   ent->const_rtx = ent->const_insn = NULL_RTX;
995   ent->comparison_code = UNKNOWN;
996 
997   eqv = &reg_eqv_table[reg];
998   eqv->next = eqv->prev = -1;
999 }
1000 
1001 /* Make reg NEW equivalent to reg OLD.
1002    OLD is not changing; NEW is.  */
1003 
1004 static void
make_regs_eqv(unsigned int new,unsigned int old)1005 make_regs_eqv (unsigned int new, unsigned int old)
1006 {
1007   unsigned int lastr, firstr;
1008   int q = REG_QTY (old);
1009   struct qty_table_elem *ent;
1010 
1011   ent = &qty_table[q];
1012 
1013   /* Nothing should become eqv until it has a "non-invalid" qty number.  */
1014   gcc_assert (REGNO_QTY_VALID_P (old));
1015 
1016   REG_QTY (new) = q;
1017   firstr = ent->first_reg;
1018   lastr = ent->last_reg;
1019 
1020   /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
1021      hard regs.  Among pseudos, if NEW will live longer than any other reg
1022      of the same qty, and that is beyond the current basic block,
1023      make it the new canonical replacement for this qty.  */
1024   if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1025       /* Certain fixed registers might be of the class NO_REGS.  This means
1026 	 that not only can they not be allocated by the compiler, but
1027 	 they cannot be used in substitutions or canonicalizations
1028 	 either.  */
1029       && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1030       && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1031 	  || (new >= FIRST_PSEUDO_REGISTER
1032 	      && (firstr < FIRST_PSEUDO_REGISTER
1033 		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1034 		       || (uid_cuid[REGNO_FIRST_UID (new)]
1035 			   < cse_basic_block_start))
1036 		      && (uid_cuid[REGNO_LAST_UID (new)]
1037 			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1038     {
1039       reg_eqv_table[firstr].prev = new;
1040       reg_eqv_table[new].next = firstr;
1041       reg_eqv_table[new].prev = -1;
1042       ent->first_reg = new;
1043     }
1044   else
1045     {
1046       /* If NEW is a hard reg (known to be non-fixed), insert at end.
1047 	 Otherwise, insert before any non-fixed hard regs that are at the
1048 	 end.  Registers of class NO_REGS cannot be used as an
1049 	 equivalent for anything.  */
1050       while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1051 	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1052 	     && new >= FIRST_PSEUDO_REGISTER)
1053 	lastr = reg_eqv_table[lastr].prev;
1054       reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1055       if (reg_eqv_table[lastr].next >= 0)
1056 	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1057       else
1058 	qty_table[q].last_reg = new;
1059       reg_eqv_table[lastr].next = new;
1060       reg_eqv_table[new].prev = lastr;
1061     }
1062 }
1063 
1064 /* Remove REG from its equivalence class.  */
1065 
1066 static void
delete_reg_equiv(unsigned int reg)1067 delete_reg_equiv (unsigned int reg)
1068 {
1069   struct qty_table_elem *ent;
1070   int q = REG_QTY (reg);
1071   int p, n;
1072 
1073   /* If invalid, do nothing.  */
1074   if (! REGNO_QTY_VALID_P (reg))
1075     return;
1076 
1077   ent = &qty_table[q];
1078 
1079   p = reg_eqv_table[reg].prev;
1080   n = reg_eqv_table[reg].next;
1081 
1082   if (n != -1)
1083     reg_eqv_table[n].prev = p;
1084   else
1085     ent->last_reg = p;
1086   if (p != -1)
1087     reg_eqv_table[p].next = n;
1088   else
1089     ent->first_reg = n;
1090 
1091   REG_QTY (reg) = -reg - 1;
1092 }
1093 
1094 /* Remove any invalid expressions from the hash table
1095    that refer to any of the registers contained in expression X.
1096 
1097    Make sure that newly inserted references to those registers
1098    as subexpressions will be considered valid.
1099 
1100    mention_regs is not called when a register itself
1101    is being stored in the table.
1102 
1103    Return 1 if we have done something that may have changed the hash code
1104    of X.  */
1105 
1106 static int
mention_regs(rtx x)1107 mention_regs (rtx x)
1108 {
1109   enum rtx_code code;
1110   int i, j;
1111   const char *fmt;
1112   int changed = 0;
1113 
1114   if (x == 0)
1115     return 0;
1116 
1117   code = GET_CODE (x);
1118   if (code == REG)
1119     {
1120       unsigned int regno = REGNO (x);
1121       unsigned int endregno
1122 	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1123 		   : hard_regno_nregs[regno][GET_MODE (x)]);
1124       unsigned int i;
1125 
1126       for (i = regno; i < endregno; i++)
1127 	{
1128 	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1129 	    remove_invalid_refs (i);
1130 
1131 	  REG_IN_TABLE (i) = REG_TICK (i);
1132 	  SUBREG_TICKED (i) = -1;
1133 	}
1134 
1135       return 0;
1136     }
1137 
1138   /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1139      pseudo if they don't use overlapping words.  We handle only pseudos
1140      here for simplicity.  */
1141   if (code == SUBREG && REG_P (SUBREG_REG (x))
1142       && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1143     {
1144       unsigned int i = REGNO (SUBREG_REG (x));
1145 
1146       if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1147 	{
1148 	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1149 	     the last store to this register really stored into this
1150 	     subreg, then remove the memory of this subreg.
1151 	     Otherwise, remove any memory of the entire register and
1152 	     all its subregs from the table.  */
1153 	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1154 	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1155 	    remove_invalid_refs (i);
1156 	  else
1157 	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1158 	}
1159 
1160       REG_IN_TABLE (i) = REG_TICK (i);
1161       SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1162       return 0;
1163     }
1164 
1165   /* If X is a comparison or a COMPARE and either operand is a register
1166      that does not have a quantity, give it one.  This is so that a later
1167      call to record_jump_equiv won't cause X to be assigned a different
1168      hash code and not found in the table after that call.
1169 
1170      It is not necessary to do this here, since rehash_using_reg can
1171      fix up the table later, but doing this here eliminates the need to
1172      call that expensive function in the most common case where the only
1173      use of the register is in the comparison.  */
1174 
1175   if (code == COMPARE || COMPARISON_P (x))
1176     {
1177       if (REG_P (XEXP (x, 0))
1178 	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1179 	if (insert_regs (XEXP (x, 0), NULL, 0))
1180 	  {
1181 	    rehash_using_reg (XEXP (x, 0));
1182 	    changed = 1;
1183 	  }
1184 
1185       if (REG_P (XEXP (x, 1))
1186 	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1187 	if (insert_regs (XEXP (x, 1), NULL, 0))
1188 	  {
1189 	    rehash_using_reg (XEXP (x, 1));
1190 	    changed = 1;
1191 	  }
1192     }
1193 
1194   fmt = GET_RTX_FORMAT (code);
1195   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1196     if (fmt[i] == 'e')
1197       changed |= mention_regs (XEXP (x, i));
1198     else if (fmt[i] == 'E')
1199       for (j = 0; j < XVECLEN (x, i); j++)
1200 	changed |= mention_regs (XVECEXP (x, i, j));
1201 
1202   return changed;
1203 }
1204 
1205 /* Update the register quantities for inserting X into the hash table
1206    with a value equivalent to CLASSP.
1207    (If the class does not contain a REG, it is irrelevant.)
1208    If MODIFIED is nonzero, X is a destination; it is being modified.
1209    Note that delete_reg_equiv should be called on a register
1210    before insert_regs is done on that register with MODIFIED != 0.
1211 
1212    Nonzero value means that elements of reg_qty have changed
1213    so X's hash code may be different.  */
1214 
1215 static int
insert_regs(rtx x,struct table_elt * classp,int modified)1216 insert_regs (rtx x, struct table_elt *classp, int modified)
1217 {
1218   if (REG_P (x))
1219     {
1220       unsigned int regno = REGNO (x);
1221       int qty_valid;
1222 
1223       /* If REGNO is in the equivalence table already but is of the
1224 	 wrong mode for that equivalence, don't do anything here.  */
1225 
1226       qty_valid = REGNO_QTY_VALID_P (regno);
1227       if (qty_valid)
1228 	{
1229 	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1230 
1231 	  if (ent->mode != GET_MODE (x))
1232 	    return 0;
1233 	}
1234 
1235       if (modified || ! qty_valid)
1236 	{
1237 	  if (classp)
1238 	    for (classp = classp->first_same_value;
1239 		 classp != 0;
1240 		 classp = classp->next_same_value)
1241 	      if (REG_P (classp->exp)
1242 		  && GET_MODE (classp->exp) == GET_MODE (x))
1243 		{
1244 		  unsigned c_regno = REGNO (classp->exp);
1245 
1246 		  gcc_assert (REGNO_QTY_VALID_P (c_regno));
1247 
1248 		  /* Suppose that 5 is hard reg and 100 and 101 are
1249 		     pseudos.  Consider
1250 
1251 		     (set (reg:si 100) (reg:si 5))
1252 		     (set (reg:si 5) (reg:si 100))
1253 		     (set (reg:di 101) (reg:di 5))
1254 
1255 		     We would now set REG_QTY (101) = REG_QTY (5), but the
1256 		     entry for 5 is in SImode.  When we use this later in
1257 		     copy propagation, we get the register in wrong mode.  */
1258 		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
1259 		    continue;
1260 
1261 		  make_regs_eqv (regno, c_regno);
1262 		  return 1;
1263 		}
1264 
1265 	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1266 	     than REG_IN_TABLE to find out if there was only a single preceding
1267 	     invalidation - for the SUBREG - or another one, which would be
1268 	     for the full register.  However, if we find here that REG_TICK
1269 	     indicates that the register is invalid, it means that it has
1270 	     been invalidated in a separate operation.  The SUBREG might be used
1271 	     now (then this is a recursive call), or we might use the full REG
1272 	     now and a SUBREG of it later.  So bump up REG_TICK so that
1273 	     mention_regs will do the right thing.  */
1274 	  if (! modified
1275 	      && REG_IN_TABLE (regno) >= 0
1276 	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1277 	    REG_TICK (regno)++;
1278 	  make_new_qty (regno, GET_MODE (x));
1279 	  return 1;
1280 	}
1281 
1282       return 0;
1283     }
1284 
1285   /* If X is a SUBREG, we will likely be inserting the inner register in the
1286      table.  If that register doesn't have an assigned quantity number at
1287      this point but does later, the insertion that we will be doing now will
1288      not be accessible because its hash code will have changed.  So assign
1289      a quantity number now.  */
1290 
1291   else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1292 	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1293     {
1294       insert_regs (SUBREG_REG (x), NULL, 0);
1295       mention_regs (x);
1296       return 1;
1297     }
1298   else
1299     return mention_regs (x);
1300 }
1301 
1302 /* Look in or update the hash table.  */
1303 
1304 /* Remove table element ELT from use in the table.
1305    HASH is its hash code, made using the HASH macro.
1306    It's an argument because often that is known in advance
1307    and we save much time not recomputing it.  */
1308 
1309 static void
remove_from_table(struct table_elt * elt,unsigned int hash)1310 remove_from_table (struct table_elt *elt, unsigned int hash)
1311 {
1312   if (elt == 0)
1313     return;
1314 
1315   /* Mark this element as removed.  See cse_insn.  */
1316   elt->first_same_value = 0;
1317 
1318   /* Remove the table element from its equivalence class.  */
1319 
1320   {
1321     struct table_elt *prev = elt->prev_same_value;
1322     struct table_elt *next = elt->next_same_value;
1323 
1324     if (next)
1325       next->prev_same_value = prev;
1326 
1327     if (prev)
1328       prev->next_same_value = next;
1329     else
1330       {
1331 	struct table_elt *newfirst = next;
1332 	while (next)
1333 	  {
1334 	    next->first_same_value = newfirst;
1335 	    next = next->next_same_value;
1336 	  }
1337       }
1338   }
1339 
1340   /* Remove the table element from its hash bucket.  */
1341 
1342   {
1343     struct table_elt *prev = elt->prev_same_hash;
1344     struct table_elt *next = elt->next_same_hash;
1345 
1346     if (next)
1347       next->prev_same_hash = prev;
1348 
1349     if (prev)
1350       prev->next_same_hash = next;
1351     else if (table[hash] == elt)
1352       table[hash] = next;
1353     else
1354       {
1355 	/* This entry is not in the proper hash bucket.  This can happen
1356 	   when two classes were merged by `merge_equiv_classes'.  Search
1357 	   for the hash bucket that it heads.  This happens only very
1358 	   rarely, so the cost is acceptable.  */
1359 	for (hash = 0; hash < HASH_SIZE; hash++)
1360 	  if (table[hash] == elt)
1361 	    table[hash] = next;
1362       }
1363   }
1364 
1365   /* Remove the table element from its related-value circular chain.  */
1366 
1367   if (elt->related_value != 0 && elt->related_value != elt)
1368     {
1369       struct table_elt *p = elt->related_value;
1370 
1371       while (p->related_value != elt)
1372 	p = p->related_value;
1373       p->related_value = elt->related_value;
1374       if (p->related_value == p)
1375 	p->related_value = 0;
1376     }
1377 
1378   /* Now add it to the free element chain.  */
1379   elt->next_same_hash = free_element_chain;
1380   free_element_chain = elt;
1381 
1382   table_size--;
1383 }
1384 
1385 /* Same as above, but X is a pseudo-register.  */
1386 
1387 static void
remove_pseudo_from_table(rtx x,unsigned int hash)1388 remove_pseudo_from_table (rtx x, unsigned int hash)
1389 {
1390   struct table_elt *elt;
1391 
1392   /* Because a pseudo-register can be referenced in more than one
1393      mode, we might have to remove more than one table entry.  */
1394   while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1395     remove_from_table (elt, hash);
1396 }
1397 
1398 /* Look up X in the hash table and return its table element,
1399    or 0 if X is not in the table.
1400 
1401    MODE is the machine-mode of X, or if X is an integer constant
1402    with VOIDmode then MODE is the mode with which X will be used.
1403 
1404    Here we are satisfied to find an expression whose tree structure
1405    looks like X.  */
1406 
1407 static struct table_elt *
lookup(rtx x,unsigned int hash,enum machine_mode mode)1408 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1409 {
1410   struct table_elt *p;
1411 
1412   for (p = table[hash]; p; p = p->next_same_hash)
1413     if (mode == p->mode && ((x == p->exp && REG_P (x))
1414 			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1415       return p;
1416 
1417   return 0;
1418 }
1419 
1420 /* Like `lookup' but don't care whether the table element uses invalid regs.
1421    Also ignore discrepancies in the machine mode of a register.  */
1422 
1423 static struct table_elt *
lookup_for_remove(rtx x,unsigned int hash,enum machine_mode mode)1424 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1425 {
1426   struct table_elt *p;
1427 
1428   if (REG_P (x))
1429     {
1430       unsigned int regno = REGNO (x);
1431 
1432       /* Don't check the machine mode when comparing registers;
1433 	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1434       for (p = table[hash]; p; p = p->next_same_hash)
1435 	if (REG_P (p->exp)
1436 	    && REGNO (p->exp) == regno)
1437 	  return p;
1438     }
1439   else
1440     {
1441       for (p = table[hash]; p; p = p->next_same_hash)
1442 	if (mode == p->mode
1443 	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1444 	  return p;
1445     }
1446 
1447   return 0;
1448 }
1449 
1450 /* Look for an expression equivalent to X and with code CODE.
1451    If one is found, return that expression.  */
1452 
1453 static rtx
lookup_as_function(rtx x,enum rtx_code code)1454 lookup_as_function (rtx x, enum rtx_code code)
1455 {
1456   struct table_elt *p
1457     = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1458 
1459   /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1460      long as we are narrowing.  So if we looked in vain for a mode narrower
1461      than word_mode before, look for word_mode now.  */
1462   if (p == 0 && code == CONST_INT
1463       && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1464     {
1465       x = copy_rtx (x);
1466       PUT_MODE (x, word_mode);
1467       p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1468     }
1469 
1470   if (p == 0)
1471     return 0;
1472 
1473   for (p = p->first_same_value; p; p = p->next_same_value)
1474     if (GET_CODE (p->exp) == code
1475 	/* Make sure this is a valid entry in the table.  */
1476 	&& exp_equiv_p (p->exp, p->exp, 1, false))
1477       return p->exp;
1478 
1479   return 0;
1480 }
1481 
1482 /* Insert X in the hash table, assuming HASH is its hash code
1483    and CLASSP is an element of the class it should go in
1484    (or 0 if a new class should be made).
1485    It is inserted at the proper position to keep the class in
1486    the order cheapest first.
1487 
1488    MODE is the machine-mode of X, or if X is an integer constant
1489    with VOIDmode then MODE is the mode with which X will be used.
1490 
1491    For elements of equal cheapness, the most recent one
1492    goes in front, except that the first element in the list
1493    remains first unless a cheaper element is added.  The order of
1494    pseudo-registers does not matter, as canon_reg will be called to
1495    find the cheapest when a register is retrieved from the table.
1496 
1497    The in_memory field in the hash table element is set to 0.
1498    The caller must set it nonzero if appropriate.
1499 
1500    You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1501    and if insert_regs returns a nonzero value
1502    you must then recompute its hash code before calling here.
1503 
1504    If necessary, update table showing constant values of quantities.  */
1505 
1506 #define CHEAPER(X, Y) \
1507  (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1508 
1509 static struct table_elt *
insert(rtx x,struct table_elt * classp,unsigned int hash,enum machine_mode mode)1510 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1511 {
1512   struct table_elt *elt;
1513 
1514   /* If X is a register and we haven't made a quantity for it,
1515      something is wrong.  */
1516   gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1517 
1518   /* If X is a hard register, show it is being put in the table.  */
1519   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1520     {
1521       unsigned int regno = REGNO (x);
1522       unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1523       unsigned int i;
1524 
1525       for (i = regno; i < endregno; i++)
1526 	SET_HARD_REG_BIT (hard_regs_in_table, i);
1527     }
1528 
1529   /* Put an element for X into the right hash bucket.  */
1530 
1531   elt = free_element_chain;
1532   if (elt)
1533     free_element_chain = elt->next_same_hash;
1534   else
1535     elt = XNEW (struct table_elt);
1536 
1537   elt->exp = x;
1538   elt->canon_exp = NULL_RTX;
1539   elt->cost = COST (x);
1540   elt->regcost = approx_reg_cost (x);
1541   elt->next_same_value = 0;
1542   elt->prev_same_value = 0;
1543   elt->next_same_hash = table[hash];
1544   elt->prev_same_hash = 0;
1545   elt->related_value = 0;
1546   elt->in_memory = 0;
1547   elt->mode = mode;
1548   elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1549 
1550   if (table[hash])
1551     table[hash]->prev_same_hash = elt;
1552   table[hash] = elt;
1553 
1554   /* Put it into the proper value-class.  */
1555   if (classp)
1556     {
1557       classp = classp->first_same_value;
1558       if (CHEAPER (elt, classp))
1559 	/* Insert at the head of the class.  */
1560 	{
1561 	  struct table_elt *p;
1562 	  elt->next_same_value = classp;
1563 	  classp->prev_same_value = elt;
1564 	  elt->first_same_value = elt;
1565 
1566 	  for (p = classp; p; p = p->next_same_value)
1567 	    p->first_same_value = elt;
1568 	}
1569       else
1570 	{
1571 	  /* Insert not at head of the class.  */
1572 	  /* Put it after the last element cheaper than X.  */
1573 	  struct table_elt *p, *next;
1574 
1575 	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1576 	       p = next);
1577 
1578 	  /* Put it after P and before NEXT.  */
1579 	  elt->next_same_value = next;
1580 	  if (next)
1581 	    next->prev_same_value = elt;
1582 
1583 	  elt->prev_same_value = p;
1584 	  p->next_same_value = elt;
1585 	  elt->first_same_value = classp;
1586 	}
1587     }
1588   else
1589     elt->first_same_value = elt;
1590 
1591   /* If this is a constant being set equivalent to a register or a register
1592      being set equivalent to a constant, note the constant equivalence.
1593 
1594      If this is a constant, it cannot be equivalent to a different constant,
1595      and a constant is the only thing that can be cheaper than a register.  So
1596      we know the register is the head of the class (before the constant was
1597      inserted).
1598 
1599      If this is a register that is not already known equivalent to a
1600      constant, we must check the entire class.
1601 
1602      If this is a register that is already known equivalent to an insn,
1603      update the qtys `const_insn' to show that `this_insn' is the latest
1604      insn making that quantity equivalent to the constant.  */
1605 
1606   if (elt->is_const && classp && REG_P (classp->exp)
1607       && !REG_P (x))
1608     {
1609       int exp_q = REG_QTY (REGNO (classp->exp));
1610       struct qty_table_elem *exp_ent = &qty_table[exp_q];
1611 
1612       exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1613       exp_ent->const_insn = this_insn;
1614     }
1615 
1616   else if (REG_P (x)
1617 	   && classp
1618 	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1619 	   && ! elt->is_const)
1620     {
1621       struct table_elt *p;
1622 
1623       for (p = classp; p != 0; p = p->next_same_value)
1624 	{
1625 	  if (p->is_const && !REG_P (p->exp))
1626 	    {
1627 	      int x_q = REG_QTY (REGNO (x));
1628 	      struct qty_table_elem *x_ent = &qty_table[x_q];
1629 
1630 	      x_ent->const_rtx
1631 		= gen_lowpart (GET_MODE (x), p->exp);
1632 	      x_ent->const_insn = this_insn;
1633 	      break;
1634 	    }
1635 	}
1636     }
1637 
1638   else if (REG_P (x)
1639 	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1640 	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1641     qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1642 
1643   /* If this is a constant with symbolic value,
1644      and it has a term with an explicit integer value,
1645      link it up with related expressions.  */
1646   if (GET_CODE (x) == CONST)
1647     {
1648       rtx subexp = get_related_value (x);
1649       unsigned subhash;
1650       struct table_elt *subelt, *subelt_prev;
1651 
1652       if (subexp != 0)
1653 	{
1654 	  /* Get the integer-free subexpression in the hash table.  */
1655 	  subhash = SAFE_HASH (subexp, mode);
1656 	  subelt = lookup (subexp, subhash, mode);
1657 	  if (subelt == 0)
1658 	    subelt = insert (subexp, NULL, subhash, mode);
1659 	  /* Initialize SUBELT's circular chain if it has none.  */
1660 	  if (subelt->related_value == 0)
1661 	    subelt->related_value = subelt;
1662 	  /* Find the element in the circular chain that precedes SUBELT.  */
1663 	  subelt_prev = subelt;
1664 	  while (subelt_prev->related_value != subelt)
1665 	    subelt_prev = subelt_prev->related_value;
1666 	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1667 	     This way the element that follows SUBELT is the oldest one.  */
1668 	  elt->related_value = subelt_prev->related_value;
1669 	  subelt_prev->related_value = elt;
1670 	}
1671     }
1672 
1673   table_size++;
1674 
1675   return elt;
1676 }
1677 
1678 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1679    CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1680    the two classes equivalent.
1681 
1682    CLASS1 will be the surviving class; CLASS2 should not be used after this
1683    call.
1684 
1685    Any invalid entries in CLASS2 will not be copied.  */
1686 
1687 static void
merge_equiv_classes(struct table_elt * class1,struct table_elt * class2)1688 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1689 {
1690   struct table_elt *elt, *next, *new;
1691 
1692   /* Ensure we start with the head of the classes.  */
1693   class1 = class1->first_same_value;
1694   class2 = class2->first_same_value;
1695 
1696   /* If they were already equal, forget it.  */
1697   if (class1 == class2)
1698     return;
1699 
1700   for (elt = class2; elt; elt = next)
1701     {
1702       unsigned int hash;
1703       rtx exp = elt->exp;
1704       enum machine_mode mode = elt->mode;
1705 
1706       next = elt->next_same_value;
1707 
1708       /* Remove old entry, make a new one in CLASS1's class.
1709 	 Don't do this for invalid entries as we cannot find their
1710 	 hash code (it also isn't necessary).  */
1711       if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1712 	{
1713 	  bool need_rehash = false;
1714 
1715 	  hash_arg_in_memory = 0;
1716 	  hash = HASH (exp, mode);
1717 
1718 	  if (REG_P (exp))
1719 	    {
1720 	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1721 	      delete_reg_equiv (REGNO (exp));
1722 	    }
1723 
1724 	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1725 	    remove_pseudo_from_table (exp, hash);
1726 	  else
1727 	    remove_from_table (elt, hash);
1728 
1729 	  if (insert_regs (exp, class1, 0) || need_rehash)
1730 	    {
1731 	      rehash_using_reg (exp);
1732 	      hash = HASH (exp, mode);
1733 	    }
1734 	  new = insert (exp, class1, hash, mode);
1735 	  new->in_memory = hash_arg_in_memory;
1736 	}
1737     }
1738 }
1739 
1740 /* Flush the entire hash table.  */
1741 
1742 static void
flush_hash_table(void)1743 flush_hash_table (void)
1744 {
1745   int i;
1746   struct table_elt *p;
1747 
1748   for (i = 0; i < HASH_SIZE; i++)
1749     for (p = table[i]; p; p = table[i])
1750       {
1751 	/* Note that invalidate can remove elements
1752 	   after P in the current hash chain.  */
1753 	if (REG_P (p->exp))
1754 	  invalidate (p->exp, VOIDmode);
1755 	else
1756 	  remove_from_table (p, i);
1757       }
1758 }
1759 
1760 /* Function called for each rtx to check whether true dependence exist.  */
1761 struct check_dependence_data
1762 {
1763   enum machine_mode mode;
1764   rtx exp;
1765   rtx addr;
1766 };
1767 
1768 static int
check_dependence(rtx * x,void * data)1769 check_dependence (rtx *x, void *data)
1770 {
1771   struct check_dependence_data *d = (struct check_dependence_data *) data;
1772   if (*x && MEM_P (*x))
1773     return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1774 		    		  cse_rtx_varies_p);
1775   else
1776     return 0;
1777 }
1778 
1779 /* Remove from the hash table, or mark as invalid, all expressions whose
1780    values could be altered by storing in X.  X is a register, a subreg, or
1781    a memory reference with nonvarying address (because, when a memory
1782    reference with a varying address is stored in, all memory references are
1783    removed by invalidate_memory so specific invalidation is superfluous).
1784    FULL_MODE, if not VOIDmode, indicates that this much should be
1785    invalidated instead of just the amount indicated by the mode of X.  This
1786    is only used for bitfield stores into memory.
1787 
1788    A nonvarying address may be just a register or just a symbol reference,
1789    or it may be either of those plus a numeric offset.  */
1790 
1791 static void
invalidate(rtx x,enum machine_mode full_mode)1792 invalidate (rtx x, enum machine_mode full_mode)
1793 {
1794   int i;
1795   struct table_elt *p;
1796   rtx addr;
1797 
1798   switch (GET_CODE (x))
1799     {
1800     case REG:
1801       {
1802 	/* If X is a register, dependencies on its contents are recorded
1803 	   through the qty number mechanism.  Just change the qty number of
1804 	   the register, mark it as invalid for expressions that refer to it,
1805 	   and remove it itself.  */
1806 	unsigned int regno = REGNO (x);
1807 	unsigned int hash = HASH (x, GET_MODE (x));
1808 
1809 	/* Remove REGNO from any quantity list it might be on and indicate
1810 	   that its value might have changed.  If it is a pseudo, remove its
1811 	   entry from the hash table.
1812 
1813 	   For a hard register, we do the first two actions above for any
1814 	   additional hard registers corresponding to X.  Then, if any of these
1815 	   registers are in the table, we must remove any REG entries that
1816 	   overlap these registers.  */
1817 
1818 	delete_reg_equiv (regno);
1819 	REG_TICK (regno)++;
1820 	SUBREG_TICKED (regno) = -1;
1821 
1822 	if (regno >= FIRST_PSEUDO_REGISTER)
1823 	  remove_pseudo_from_table (x, hash);
1824 	else
1825 	  {
1826 	    HOST_WIDE_INT in_table
1827 	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1828 	    unsigned int endregno
1829 	      = regno + hard_regno_nregs[regno][GET_MODE (x)];
1830 	    unsigned int tregno, tendregno, rn;
1831 	    struct table_elt *p, *next;
1832 
1833 	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1834 
1835 	    for (rn = regno + 1; rn < endregno; rn++)
1836 	      {
1837 		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1838 		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1839 		delete_reg_equiv (rn);
1840 		REG_TICK (rn)++;
1841 		SUBREG_TICKED (rn) = -1;
1842 	      }
1843 
1844 	    if (in_table)
1845 	      for (hash = 0; hash < HASH_SIZE; hash++)
1846 		for (p = table[hash]; p; p = next)
1847 		  {
1848 		    next = p->next_same_hash;
1849 
1850 		    if (!REG_P (p->exp)
1851 			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1852 		      continue;
1853 
1854 		    tregno = REGNO (p->exp);
1855 		    tendregno
1856 		      = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1857 		    if (tendregno > regno && tregno < endregno)
1858 		      remove_from_table (p, hash);
1859 		  }
1860 	  }
1861       }
1862       return;
1863 
1864     case SUBREG:
1865       invalidate (SUBREG_REG (x), VOIDmode);
1866       return;
1867 
1868     case PARALLEL:
1869       for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1870 	invalidate (XVECEXP (x, 0, i), VOIDmode);
1871       return;
1872 
1873     case EXPR_LIST:
1874       /* This is part of a disjoint return value; extract the location in
1875 	 question ignoring the offset.  */
1876       invalidate (XEXP (x, 0), VOIDmode);
1877       return;
1878 
1879     case MEM:
1880       addr = canon_rtx (get_addr (XEXP (x, 0)));
1881       /* Calculate the canonical version of X here so that
1882 	 true_dependence doesn't generate new RTL for X on each call.  */
1883       x = canon_rtx (x);
1884 
1885       /* Remove all hash table elements that refer to overlapping pieces of
1886 	 memory.  */
1887       if (full_mode == VOIDmode)
1888 	full_mode = GET_MODE (x);
1889 
1890       for (i = 0; i < HASH_SIZE; i++)
1891 	{
1892 	  struct table_elt *next;
1893 
1894 	  for (p = table[i]; p; p = next)
1895 	    {
1896 	      next = p->next_same_hash;
1897 	      if (p->in_memory)
1898 		{
1899 		  struct check_dependence_data d;
1900 
1901 		  /* Just canonicalize the expression once;
1902 		     otherwise each time we call invalidate
1903 		     true_dependence will canonicalize the
1904 		     expression again.  */
1905 		  if (!p->canon_exp)
1906 		    p->canon_exp = canon_rtx (p->exp);
1907 		  d.exp = x;
1908 		  d.addr = addr;
1909 		  d.mode = full_mode;
1910 		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1911 		    remove_from_table (p, i);
1912 		}
1913 	    }
1914 	}
1915       return;
1916 
1917     default:
1918       gcc_unreachable ();
1919     }
1920 }
1921 
1922 /* Remove all expressions that refer to register REGNO,
1923    since they are already invalid, and we are about to
1924    mark that register valid again and don't want the old
1925    expressions to reappear as valid.  */
1926 
1927 static void
remove_invalid_refs(unsigned int regno)1928 remove_invalid_refs (unsigned int regno)
1929 {
1930   unsigned int i;
1931   struct table_elt *p, *next;
1932 
1933   for (i = 0; i < HASH_SIZE; i++)
1934     for (p = table[i]; p; p = next)
1935       {
1936 	next = p->next_same_hash;
1937 	if (!REG_P (p->exp)
1938 	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1939 	  remove_from_table (p, i);
1940       }
1941 }
1942 
1943 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1944    and mode MODE.  */
1945 static void
remove_invalid_subreg_refs(unsigned int regno,unsigned int offset,enum machine_mode mode)1946 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1947 			    enum machine_mode mode)
1948 {
1949   unsigned int i;
1950   struct table_elt *p, *next;
1951   unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1952 
1953   for (i = 0; i < HASH_SIZE; i++)
1954     for (p = table[i]; p; p = next)
1955       {
1956 	rtx exp = p->exp;
1957 	next = p->next_same_hash;
1958 
1959 	if (!REG_P (exp)
1960 	    && (GET_CODE (exp) != SUBREG
1961 		|| !REG_P (SUBREG_REG (exp))
1962 		|| REGNO (SUBREG_REG (exp)) != regno
1963 		|| (((SUBREG_BYTE (exp)
1964 		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1965 		    && SUBREG_BYTE (exp) <= end))
1966 	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1967 	  remove_from_table (p, i);
1968       }
1969 }
1970 
1971 /* Recompute the hash codes of any valid entries in the hash table that
1972    reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1973 
1974    This is called when we make a jump equivalence.  */
1975 
1976 static void
rehash_using_reg(rtx x)1977 rehash_using_reg (rtx x)
1978 {
1979   unsigned int i;
1980   struct table_elt *p, *next;
1981   unsigned hash;
1982 
1983   if (GET_CODE (x) == SUBREG)
1984     x = SUBREG_REG (x);
1985 
1986   /* If X is not a register or if the register is known not to be in any
1987      valid entries in the table, we have no work to do.  */
1988 
1989   if (!REG_P (x)
1990       || REG_IN_TABLE (REGNO (x)) < 0
1991       || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1992     return;
1993 
1994   /* Scan all hash chains looking for valid entries that mention X.
1995      If we find one and it is in the wrong hash chain, move it.  */
1996 
1997   for (i = 0; i < HASH_SIZE; i++)
1998     for (p = table[i]; p; p = next)
1999       {
2000 	next = p->next_same_hash;
2001 	if (reg_mentioned_p (x, p->exp)
2002 	    && exp_equiv_p (p->exp, p->exp, 1, false)
2003 	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
2004 	  {
2005 	    if (p->next_same_hash)
2006 	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2007 
2008 	    if (p->prev_same_hash)
2009 	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2010 	    else
2011 	      table[i] = p->next_same_hash;
2012 
2013 	    p->next_same_hash = table[hash];
2014 	    p->prev_same_hash = 0;
2015 	    if (table[hash])
2016 	      table[hash]->prev_same_hash = p;
2017 	    table[hash] = p;
2018 	  }
2019       }
2020 }
2021 
2022 /* Remove from the hash table any expression that is a call-clobbered
2023    register.  Also update their TICK values.  */
2024 
2025 static void
invalidate_for_call(void)2026 invalidate_for_call (void)
2027 {
2028   unsigned int regno, endregno;
2029   unsigned int i;
2030   unsigned hash;
2031   struct table_elt *p, *next;
2032   int in_table = 0;
2033 
2034   /* Go through all the hard registers.  For each that is clobbered in
2035      a CALL_INSN, remove the register from quantity chains and update
2036      reg_tick if defined.  Also see if any of these registers is currently
2037      in the table.  */
2038 
2039   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2040     if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2041       {
2042 	delete_reg_equiv (regno);
2043 	if (REG_TICK (regno) >= 0)
2044 	  {
2045 	    REG_TICK (regno)++;
2046 	    SUBREG_TICKED (regno) = -1;
2047 	  }
2048 
2049 	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2050       }
2051 
2052   /* In the case where we have no call-clobbered hard registers in the
2053      table, we are done.  Otherwise, scan the table and remove any
2054      entry that overlaps a call-clobbered register.  */
2055 
2056   if (in_table)
2057     for (hash = 0; hash < HASH_SIZE; hash++)
2058       for (p = table[hash]; p; p = next)
2059 	{
2060 	  next = p->next_same_hash;
2061 
2062 	  if (!REG_P (p->exp)
2063 	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2064 	    continue;
2065 
2066 	  regno = REGNO (p->exp);
2067 	  endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2068 
2069 	  for (i = regno; i < endregno; i++)
2070 	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2071 	      {
2072 		remove_from_table (p, hash);
2073 		break;
2074 	      }
2075 	}
2076 }
2077 
2078 /* Given an expression X of type CONST,
2079    and ELT which is its table entry (or 0 if it
2080    is not in the hash table),
2081    return an alternate expression for X as a register plus integer.
2082    If none can be found, return 0.  */
2083 
2084 static rtx
use_related_value(rtx x,struct table_elt * elt)2085 use_related_value (rtx x, struct table_elt *elt)
2086 {
2087   struct table_elt *relt = 0;
2088   struct table_elt *p, *q;
2089   HOST_WIDE_INT offset;
2090 
2091   /* First, is there anything related known?
2092      If we have a table element, we can tell from that.
2093      Otherwise, must look it up.  */
2094 
2095   if (elt != 0 && elt->related_value != 0)
2096     relt = elt;
2097   else if (elt == 0 && GET_CODE (x) == CONST)
2098     {
2099       rtx subexp = get_related_value (x);
2100       if (subexp != 0)
2101 	relt = lookup (subexp,
2102 		       SAFE_HASH (subexp, GET_MODE (subexp)),
2103 		       GET_MODE (subexp));
2104     }
2105 
2106   if (relt == 0)
2107     return 0;
2108 
2109   /* Search all related table entries for one that has an
2110      equivalent register.  */
2111 
2112   p = relt;
2113   while (1)
2114     {
2115       /* This loop is strange in that it is executed in two different cases.
2116 	 The first is when X is already in the table.  Then it is searching
2117 	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2118 	 X is not in the table.  Then RELT points to a class for the related
2119 	 value.
2120 
2121 	 Ensure that, whatever case we are in, that we ignore classes that have
2122 	 the same value as X.  */
2123 
2124       if (rtx_equal_p (x, p->exp))
2125 	q = 0;
2126       else
2127 	for (q = p->first_same_value; q; q = q->next_same_value)
2128 	  if (REG_P (q->exp))
2129 	    break;
2130 
2131       if (q)
2132 	break;
2133 
2134       p = p->related_value;
2135 
2136       /* We went all the way around, so there is nothing to be found.
2137 	 Alternatively, perhaps RELT was in the table for some other reason
2138 	 and it has no related values recorded.  */
2139       if (p == relt || p == 0)
2140 	break;
2141     }
2142 
2143   if (q == 0)
2144     return 0;
2145 
2146   offset = (get_integer_term (x) - get_integer_term (p->exp));
2147   /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
2148   return plus_constant (q->exp, offset);
2149 }
2150 
2151 /* Hash a string.  Just add its bytes up.  */
2152 static inline unsigned
hash_rtx_string(const char * ps)2153 hash_rtx_string (const char *ps)
2154 {
2155   unsigned hash = 0;
2156   const unsigned char *p = (const unsigned char *) ps;
2157 
2158   if (p)
2159     while (*p)
2160       hash += *p++;
2161 
2162   return hash;
2163 }
2164 
2165 /* Hash an rtx.  We are careful to make sure the value is never negative.
2166    Equivalent registers hash identically.
2167    MODE is used in hashing for CONST_INTs only;
2168    otherwise the mode of X is used.
2169 
2170    Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2171 
2172    If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2173    a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2174 
2175    Note that cse_insn knows that the hash code of a MEM expression
2176    is just (int) MEM plus the hash code of the address.  */
2177 
2178 unsigned
hash_rtx(rtx x,enum machine_mode mode,int * do_not_record_p,int * hash_arg_in_memory_p,bool have_reg_qty)2179 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2180 	  int *hash_arg_in_memory_p, bool have_reg_qty)
2181 {
2182   int i, j;
2183   unsigned hash = 0;
2184   enum rtx_code code;
2185   const char *fmt;
2186 
2187   /* Used to turn recursion into iteration.  We can't rely on GCC's
2188      tail-recursion elimination since we need to keep accumulating values
2189      in HASH.  */
2190  repeat:
2191   if (x == 0)
2192     return hash;
2193 
2194   code = GET_CODE (x);
2195   switch (code)
2196     {
2197     case REG:
2198       {
2199 	unsigned int regno = REGNO (x);
2200 
2201 	if (!reload_completed)
2202 	  {
2203 	    /* On some machines, we can't record any non-fixed hard register,
2204 	       because extending its life will cause reload problems.  We
2205 	       consider ap, fp, sp, gp to be fixed for this purpose.
2206 
2207 	       We also consider CCmode registers to be fixed for this purpose;
2208 	       failure to do so leads to failure to simplify 0<100 type of
2209 	       conditionals.
2210 
2211 	       On all machines, we can't record any global registers.
2212 	       Nor should we record any register that is in a small
2213 	       class, as defined by CLASS_LIKELY_SPILLED_P.  */
2214 	    bool record;
2215 
2216 	    if (regno >= FIRST_PSEUDO_REGISTER)
2217 	      record = true;
2218 	    else if (x == frame_pointer_rtx
2219 		     || x == hard_frame_pointer_rtx
2220 		     || x == arg_pointer_rtx
2221 		     || x == stack_pointer_rtx
2222 		     || x == pic_offset_table_rtx)
2223 	      record = true;
2224 	    else if (global_regs[regno])
2225 	      record = false;
2226 	    else if (fixed_regs[regno])
2227 	      record = true;
2228 	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2229 	      record = true;
2230 	    else if (SMALL_REGISTER_CLASSES)
2231 	      record = false;
2232 	    else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2233 	      record = false;
2234 	    else
2235 	      record = true;
2236 
2237 	    if (!record)
2238 	      {
2239 		*do_not_record_p = 1;
2240 		return 0;
2241 	      }
2242 	  }
2243 
2244 	hash += ((unsigned int) REG << 7);
2245         hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2246 	return hash;
2247       }
2248 
2249     /* We handle SUBREG of a REG specially because the underlying
2250        reg changes its hash value with every value change; we don't
2251        want to have to forget unrelated subregs when one subreg changes.  */
2252     case SUBREG:
2253       {
2254 	if (REG_P (SUBREG_REG (x)))
2255 	  {
2256 	    hash += (((unsigned int) SUBREG << 7)
2257 		     + REGNO (SUBREG_REG (x))
2258 		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2259 	    return hash;
2260 	  }
2261 	break;
2262       }
2263 
2264     case CONST_INT:
2265       hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2266                + (unsigned int) INTVAL (x));
2267       return hash;
2268 
2269     case CONST_DOUBLE:
2270       /* This is like the general case, except that it only counts
2271 	 the integers representing the constant.  */
2272       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2273       if (GET_MODE (x) != VOIDmode)
2274 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2275       else
2276 	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2277 		 + (unsigned int) CONST_DOUBLE_HIGH (x));
2278       return hash;
2279 
2280     case CONST_VECTOR:
2281       {
2282 	int units;
2283 	rtx elt;
2284 
2285 	units = CONST_VECTOR_NUNITS (x);
2286 
2287 	for (i = 0; i < units; ++i)
2288 	  {
2289 	    elt = CONST_VECTOR_ELT (x, i);
2290 	    hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2291 			      hash_arg_in_memory_p, have_reg_qty);
2292 	  }
2293 
2294 	return hash;
2295       }
2296 
2297       /* Assume there is only one rtx object for any given label.  */
2298     case LABEL_REF:
2299       /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2300 	 differences and differences between each stage's debugging dumps.  */
2301 	 hash += (((unsigned int) LABEL_REF << 7)
2302 		  + CODE_LABEL_NUMBER (XEXP (x, 0)));
2303       return hash;
2304 
2305     case SYMBOL_REF:
2306       {
2307 	/* Don't hash on the symbol's address to avoid bootstrap differences.
2308 	   Different hash values may cause expressions to be recorded in
2309 	   different orders and thus different registers to be used in the
2310 	   final assembler.  This also avoids differences in the dump files
2311 	   between various stages.  */
2312 	unsigned int h = 0;
2313 	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2314 
2315 	while (*p)
2316 	  h += (h << 7) + *p++; /* ??? revisit */
2317 
2318 	hash += ((unsigned int) SYMBOL_REF << 7) + h;
2319 	return hash;
2320       }
2321 
2322     case MEM:
2323       /* We don't record if marked volatile or if BLKmode since we don't
2324 	 know the size of the move.  */
2325       if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2326 	{
2327 	  *do_not_record_p = 1;
2328 	  return 0;
2329 	}
2330       if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2331 	*hash_arg_in_memory_p = 1;
2332 
2333       /* Now that we have already found this special case,
2334 	 might as well speed it up as much as possible.  */
2335       hash += (unsigned) MEM;
2336       x = XEXP (x, 0);
2337       goto repeat;
2338 
2339     case USE:
2340       /* A USE that mentions non-volatile memory needs special
2341 	 handling since the MEM may be BLKmode which normally
2342 	 prevents an entry from being made.  Pure calls are
2343 	 marked by a USE which mentions BLKmode memory.
2344 	 See calls.c:emit_call_1.  */
2345       if (MEM_P (XEXP (x, 0))
2346 	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2347 	{
2348 	  hash += (unsigned) USE;
2349 	  x = XEXP (x, 0);
2350 
2351 	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2352 	    *hash_arg_in_memory_p = 1;
2353 
2354 	  /* Now that we have already found this special case,
2355 	     might as well speed it up as much as possible.  */
2356 	  hash += (unsigned) MEM;
2357 	  x = XEXP (x, 0);
2358 	  goto repeat;
2359 	}
2360       break;
2361 
2362     case PRE_DEC:
2363     case PRE_INC:
2364     case POST_DEC:
2365     case POST_INC:
2366     case PRE_MODIFY:
2367     case POST_MODIFY:
2368     case PC:
2369     case CC0:
2370     case CALL:
2371     case UNSPEC_VOLATILE:
2372       *do_not_record_p = 1;
2373       return 0;
2374 
2375     case ASM_OPERANDS:
2376       if (MEM_VOLATILE_P (x))
2377 	{
2378 	  *do_not_record_p = 1;
2379 	  return 0;
2380 	}
2381       else
2382 	{
2383 	  /* We don't want to take the filename and line into account.  */
2384 	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2385 	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2386 	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2387 	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2388 
2389 	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2390 	    {
2391 	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2392 		{
2393 		  hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2394 				     GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2395 				     do_not_record_p, hash_arg_in_memory_p,
2396 				     have_reg_qty)
2397 			   + hash_rtx_string
2398 				(ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2399 		}
2400 
2401 	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2402 	      x = ASM_OPERANDS_INPUT (x, 0);
2403 	      mode = GET_MODE (x);
2404 	      goto repeat;
2405 	    }
2406 
2407 	  return hash;
2408 	}
2409       break;
2410 
2411     default:
2412       break;
2413     }
2414 
2415   i = GET_RTX_LENGTH (code) - 1;
2416   hash += (unsigned) code + (unsigned) GET_MODE (x);
2417   fmt = GET_RTX_FORMAT (code);
2418   for (; i >= 0; i--)
2419     {
2420       switch (fmt[i])
2421 	{
2422 	case 'e':
2423 	  /* If we are about to do the last recursive call
2424 	     needed at this level, change it into iteration.
2425 	     This function  is called enough to be worth it.  */
2426 	  if (i == 0)
2427 	    {
2428 	      x = XEXP (x, i);
2429 	      goto repeat;
2430 	    }
2431 
2432 	  hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2433 			    hash_arg_in_memory_p, have_reg_qty);
2434 	  break;
2435 
2436 	case 'E':
2437 	  for (j = 0; j < XVECLEN (x, i); j++)
2438 	    hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2439 			      hash_arg_in_memory_p, have_reg_qty);
2440 	  break;
2441 
2442 	case 's':
2443 	  hash += hash_rtx_string (XSTR (x, i));
2444 	  break;
2445 
2446 	case 'i':
2447 	  hash += (unsigned int) XINT (x, i);
2448 	  break;
2449 
2450 	case '0': case 't':
2451 	  /* Unused.  */
2452 	  break;
2453 
2454 	default:
2455 	  gcc_unreachable ();
2456 	}
2457     }
2458 
2459   return hash;
2460 }
2461 
2462 /* Hash an rtx X for cse via hash_rtx.
2463    Stores 1 in do_not_record if any subexpression is volatile.
2464    Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2465    does not have the RTX_UNCHANGING_P bit set.  */
2466 
2467 static inline unsigned
canon_hash(rtx x,enum machine_mode mode)2468 canon_hash (rtx x, enum machine_mode mode)
2469 {
2470   return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2471 }
2472 
2473 /* Like canon_hash but with no side effects, i.e. do_not_record
2474    and hash_arg_in_memory are not changed.  */
2475 
2476 static inline unsigned
safe_hash(rtx x,enum machine_mode mode)2477 safe_hash (rtx x, enum machine_mode mode)
2478 {
2479   int dummy_do_not_record;
2480   return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2481 }
2482 
2483 /* Return 1 iff X and Y would canonicalize into the same thing,
2484    without actually constructing the canonicalization of either one.
2485    If VALIDATE is nonzero,
2486    we assume X is an expression being processed from the rtl
2487    and Y was found in the hash table.  We check register refs
2488    in Y for being marked as valid.
2489 
2490    If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2491 
2492 int
exp_equiv_p(rtx x,rtx y,int validate,bool for_gcse)2493 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2494 {
2495   int i, j;
2496   enum rtx_code code;
2497   const char *fmt;
2498 
2499   /* Note: it is incorrect to assume an expression is equivalent to itself
2500      if VALIDATE is nonzero.  */
2501   if (x == y && !validate)
2502     return 1;
2503 
2504   if (x == 0 || y == 0)
2505     return x == y;
2506 
2507   code = GET_CODE (x);
2508   if (code != GET_CODE (y))
2509     return 0;
2510 
2511   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2512   if (GET_MODE (x) != GET_MODE (y))
2513     return 0;
2514 
2515   switch (code)
2516     {
2517     case PC:
2518     case CC0:
2519     case CONST_INT:
2520     case CONST_DOUBLE:
2521       return x == y;
2522 
2523     case LABEL_REF:
2524       return XEXP (x, 0) == XEXP (y, 0);
2525 
2526     case SYMBOL_REF:
2527       return XSTR (x, 0) == XSTR (y, 0);
2528 
2529     case REG:
2530       if (for_gcse)
2531 	return REGNO (x) == REGNO (y);
2532       else
2533 	{
2534 	  unsigned int regno = REGNO (y);
2535 	  unsigned int i;
2536 	  unsigned int endregno
2537 	    = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2538 		       : hard_regno_nregs[regno][GET_MODE (y)]);
2539 
2540 	  /* If the quantities are not the same, the expressions are not
2541 	     equivalent.  If there are and we are not to validate, they
2542 	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2543 
2544 	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2545 	    return 0;
2546 
2547 	  if (! validate)
2548 	    return 1;
2549 
2550 	  for (i = regno; i < endregno; i++)
2551 	    if (REG_IN_TABLE (i) != REG_TICK (i))
2552 	      return 0;
2553 
2554 	  return 1;
2555 	}
2556 
2557     case MEM:
2558       if (for_gcse)
2559 	{
2560 	  /* A volatile mem should not be considered equivalent to any
2561 	     other.  */
2562 	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2563 	    return 0;
2564 
2565 	  /* Can't merge two expressions in different alias sets, since we
2566 	     can decide that the expression is transparent in a block when
2567 	     it isn't, due to it being set with the different alias set.
2568 
2569 	     Also, can't merge two expressions with different MEM_ATTRS.
2570 	     They could e.g. be two different entities allocated into the
2571 	     same space on the stack (see e.g. PR25130).  In that case, the
2572 	     MEM addresses can be the same, even though the two MEMs are
2573 	     absolutely not equivalent.
2574 
2575 	     But because really all MEM attributes should be the same for
2576 	     equivalent MEMs, we just use the invariant that MEMs that have
2577 	     the same attributes share the same mem_attrs data structure.  */
2578 	  if (MEM_ATTRS (x) != MEM_ATTRS (y))
2579 	    return 0;
2580 	}
2581       break;
2582 
2583     /*  For commutative operations, check both orders.  */
2584     case PLUS:
2585     case MULT:
2586     case AND:
2587     case IOR:
2588     case XOR:
2589     case NE:
2590     case EQ:
2591       return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2592 			     validate, for_gcse)
2593 	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2594 				validate, for_gcse))
2595 	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2596 				validate, for_gcse)
2597 		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2598 				   validate, for_gcse)));
2599 
2600     case ASM_OPERANDS:
2601       /* We don't use the generic code below because we want to
2602 	 disregard filename and line numbers.  */
2603 
2604       /* A volatile asm isn't equivalent to any other.  */
2605       if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2606 	return 0;
2607 
2608       if (GET_MODE (x) != GET_MODE (y)
2609 	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2610 	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2611 		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2612 	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2613 	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2614 	return 0;
2615 
2616       if (ASM_OPERANDS_INPUT_LENGTH (x))
2617 	{
2618 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2619 	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2620 			       ASM_OPERANDS_INPUT (y, i),
2621 			       validate, for_gcse)
2622 		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2623 			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2624 	      return 0;
2625 	}
2626 
2627       return 1;
2628 
2629     default:
2630       break;
2631     }
2632 
2633   /* Compare the elements.  If any pair of corresponding elements
2634      fail to match, return 0 for the whole thing.  */
2635 
2636   fmt = GET_RTX_FORMAT (code);
2637   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2638     {
2639       switch (fmt[i])
2640 	{
2641 	case 'e':
2642 	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2643 			      validate, for_gcse))
2644 	    return 0;
2645 	  break;
2646 
2647 	case 'E':
2648 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2649 	    return 0;
2650 	  for (j = 0; j < XVECLEN (x, i); j++)
2651 	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2652 				validate, for_gcse))
2653 	      return 0;
2654 	  break;
2655 
2656 	case 's':
2657 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2658 	    return 0;
2659 	  break;
2660 
2661 	case 'i':
2662 	  if (XINT (x, i) != XINT (y, i))
2663 	    return 0;
2664 	  break;
2665 
2666 	case 'w':
2667 	  if (XWINT (x, i) != XWINT (y, i))
2668 	    return 0;
2669 	  break;
2670 
2671 	case '0':
2672 	case 't':
2673 	  break;
2674 
2675 	default:
2676 	  gcc_unreachable ();
2677 	}
2678     }
2679 
2680   return 1;
2681 }
2682 
2683 /* Return 1 if X has a value that can vary even between two
2684    executions of the program.  0 means X can be compared reliably
2685    against certain constants or near-constants.  */
2686 
2687 static int
cse_rtx_varies_p(rtx x,int from_alias)2688 cse_rtx_varies_p (rtx x, int from_alias)
2689 {
2690   /* We need not check for X and the equivalence class being of the same
2691      mode because if X is equivalent to a constant in some mode, it
2692      doesn't vary in any mode.  */
2693 
2694   if (REG_P (x)
2695       && REGNO_QTY_VALID_P (REGNO (x)))
2696     {
2697       int x_q = REG_QTY (REGNO (x));
2698       struct qty_table_elem *x_ent = &qty_table[x_q];
2699 
2700       if (GET_MODE (x) == x_ent->mode
2701 	  && x_ent->const_rtx != NULL_RTX)
2702 	return 0;
2703     }
2704 
2705   if (GET_CODE (x) == PLUS
2706       && GET_CODE (XEXP (x, 1)) == CONST_INT
2707       && REG_P (XEXP (x, 0))
2708       && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2709     {
2710       int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2711       struct qty_table_elem *x0_ent = &qty_table[x0_q];
2712 
2713       if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2714 	  && x0_ent->const_rtx != NULL_RTX)
2715 	return 0;
2716     }
2717 
2718   /* This can happen as the result of virtual register instantiation, if
2719      the initial constant is too large to be a valid address.  This gives
2720      us a three instruction sequence, load large offset into a register,
2721      load fp minus a constant into a register, then a MEM which is the
2722      sum of the two `constant' registers.  */
2723   if (GET_CODE (x) == PLUS
2724       && REG_P (XEXP (x, 0))
2725       && REG_P (XEXP (x, 1))
2726       && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2727       && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2728     {
2729       int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2730       int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2731       struct qty_table_elem *x0_ent = &qty_table[x0_q];
2732       struct qty_table_elem *x1_ent = &qty_table[x1_q];
2733 
2734       if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2735 	  && x0_ent->const_rtx != NULL_RTX
2736 	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2737 	  && x1_ent->const_rtx != NULL_RTX)
2738 	return 0;
2739     }
2740 
2741   return rtx_varies_p (x, from_alias);
2742 }
2743 
2744 /* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2745    the result if necessary.  INSN is as for canon_reg.  */
2746 
2747 static void
validate_canon_reg(rtx * xloc,rtx insn)2748 validate_canon_reg (rtx *xloc, rtx insn)
2749 {
2750   rtx new = canon_reg (*xloc, insn);
2751 
2752   /* If replacing pseudo with hard reg or vice versa, ensure the
2753      insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2754   if (insn != 0 && new != 0)
2755     validate_change (insn, xloc, new, 1);
2756   else
2757     *xloc = new;
2758 }
2759 
2760 /* Canonicalize an expression:
2761    replace each register reference inside it
2762    with the "oldest" equivalent register.
2763 
2764    If INSN is nonzero validate_change is used to ensure that INSN remains valid
2765    after we make our substitution.  The calls are made with IN_GROUP nonzero
2766    so apply_change_group must be called upon the outermost return from this
2767    function (unless INSN is zero).  The result of apply_change_group can
2768    generally be discarded since the changes we are making are optional.  */
2769 
2770 static rtx
canon_reg(rtx x,rtx insn)2771 canon_reg (rtx x, rtx insn)
2772 {
2773   int i;
2774   enum rtx_code code;
2775   const char *fmt;
2776 
2777   if (x == 0)
2778     return x;
2779 
2780   code = GET_CODE (x);
2781   switch (code)
2782     {
2783     case PC:
2784     case CC0:
2785     case CONST:
2786     case CONST_INT:
2787     case CONST_DOUBLE:
2788     case CONST_VECTOR:
2789     case SYMBOL_REF:
2790     case LABEL_REF:
2791     case ADDR_VEC:
2792     case ADDR_DIFF_VEC:
2793       return x;
2794 
2795     case REG:
2796       {
2797 	int first;
2798 	int q;
2799 	struct qty_table_elem *ent;
2800 
2801 	/* Never replace a hard reg, because hard regs can appear
2802 	   in more than one machine mode, and we must preserve the mode
2803 	   of each occurrence.  Also, some hard regs appear in
2804 	   MEMs that are shared and mustn't be altered.  Don't try to
2805 	   replace any reg that maps to a reg of class NO_REGS.  */
2806 	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2807 	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2808 	  return x;
2809 
2810 	q = REG_QTY (REGNO (x));
2811 	ent = &qty_table[q];
2812 	first = ent->first_reg;
2813 	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2814 		: REGNO_REG_CLASS (first) == NO_REGS ? x
2815 		: gen_rtx_REG (ent->mode, first));
2816       }
2817 
2818     default:
2819       break;
2820     }
2821 
2822   fmt = GET_RTX_FORMAT (code);
2823   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2824     {
2825       int j;
2826 
2827       if (fmt[i] == 'e')
2828 	validate_canon_reg (&XEXP (x, i), insn);
2829       else if (fmt[i] == 'E')
2830 	for (j = 0; j < XVECLEN (x, i); j++)
2831 	  validate_canon_reg (&XVECEXP (x, i, j), insn);
2832     }
2833 
2834   return x;
2835 }
2836 
2837 /* LOC is a location within INSN that is an operand address (the contents of
2838    a MEM).  Find the best equivalent address to use that is valid for this
2839    insn.
2840 
2841    On most CISC machines, complicated address modes are costly, and rtx_cost
2842    is a good approximation for that cost.  However, most RISC machines have
2843    only a few (usually only one) memory reference formats.  If an address is
2844    valid at all, it is often just as cheap as any other address.  Hence, for
2845    RISC machines, we use `address_cost' to compare the costs of various
2846    addresses.  For two addresses of equal cost, choose the one with the
2847    highest `rtx_cost' value as that has the potential of eliminating the
2848    most insns.  For equal costs, we choose the first in the equivalence
2849    class.  Note that we ignore the fact that pseudo registers are cheaper than
2850    hard registers here because we would also prefer the pseudo registers.  */
2851 
2852 static void
find_best_addr(rtx insn,rtx * loc,enum machine_mode mode)2853 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2854 {
2855   struct table_elt *elt;
2856   rtx addr = *loc;
2857   struct table_elt *p;
2858   int found_better = 1;
2859   int save_do_not_record = do_not_record;
2860   int save_hash_arg_in_memory = hash_arg_in_memory;
2861   int addr_volatile;
2862   int regno;
2863   unsigned hash;
2864 
2865   /* Do not try to replace constant addresses or addresses of local and
2866      argument slots.  These MEM expressions are made only once and inserted
2867      in many instructions, as well as being used to control symbol table
2868      output.  It is not safe to clobber them.
2869 
2870      There are some uncommon cases where the address is already in a register
2871      for some reason, but we cannot take advantage of that because we have
2872      no easy way to unshare the MEM.  In addition, looking up all stack
2873      addresses is costly.  */
2874   if ((GET_CODE (addr) == PLUS
2875        && REG_P (XEXP (addr, 0))
2876        && GET_CODE (XEXP (addr, 1)) == CONST_INT
2877        && (regno = REGNO (XEXP (addr, 0)),
2878 	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2879 	   || regno == ARG_POINTER_REGNUM))
2880       || (REG_P (addr)
2881 	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2882 	      || regno == HARD_FRAME_POINTER_REGNUM
2883 	      || regno == ARG_POINTER_REGNUM))
2884       || CONSTANT_ADDRESS_P (addr))
2885     return;
2886 
2887   /* If this address is not simply a register, try to fold it.  This will
2888      sometimes simplify the expression.  Many simplifications
2889      will not be valid, but some, usually applying the associative rule, will
2890      be valid and produce better code.  */
2891   if (!REG_P (addr))
2892     {
2893       rtx folded = canon_for_address (fold_rtx (addr, NULL_RTX));
2894 
2895       if (folded != addr)
2896 	{
2897 	  int addr_folded_cost = address_cost (folded, mode);
2898 	  int addr_cost = address_cost (addr, mode);
2899 
2900 	  if ((addr_folded_cost < addr_cost
2901 	       || (addr_folded_cost == addr_cost
2902 		   /* ??? The rtx_cost comparison is left over from an older
2903 		      version of this code.  It is probably no longer helpful.*/
2904 		   && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2905 		       || approx_reg_cost (folded) < approx_reg_cost (addr))))
2906 	      && validate_change (insn, loc, folded, 0))
2907 	    addr = folded;
2908 	}
2909     }
2910 
2911   /* If this address is not in the hash table, we can't look for equivalences
2912      of the whole address.  Also, ignore if volatile.  */
2913 
2914   do_not_record = 0;
2915   hash = HASH (addr, Pmode);
2916   addr_volatile = do_not_record;
2917   do_not_record = save_do_not_record;
2918   hash_arg_in_memory = save_hash_arg_in_memory;
2919 
2920   if (addr_volatile)
2921     return;
2922 
2923   elt = lookup (addr, hash, Pmode);
2924 
2925   if (elt)
2926     {
2927       /* We need to find the best (under the criteria documented above) entry
2928 	 in the class that is valid.  We use the `flag' field to indicate
2929 	 choices that were invalid and iterate until we can't find a better
2930 	 one that hasn't already been tried.  */
2931 
2932       for (p = elt->first_same_value; p; p = p->next_same_value)
2933 	p->flag = 0;
2934 
2935       while (found_better)
2936 	{
2937 	  int best_addr_cost = address_cost (*loc, mode);
2938 	  int best_rtx_cost = (elt->cost + 1) >> 1;
2939 	  int exp_cost;
2940 	  struct table_elt *best_elt = elt;
2941 
2942 	  found_better = 0;
2943 	  for (p = elt->first_same_value; p; p = p->next_same_value)
2944 	    if (! p->flag)
2945 	      {
2946 		if ((REG_P (p->exp)
2947 		     || exp_equiv_p (p->exp, p->exp, 1, false))
2948 		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2949 			|| (exp_cost == best_addr_cost
2950 			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
2951 		  {
2952 		    found_better = 1;
2953 		    best_addr_cost = exp_cost;
2954 		    best_rtx_cost = (p->cost + 1) >> 1;
2955 		    best_elt = p;
2956 		  }
2957 	      }
2958 
2959 	  if (found_better)
2960 	    {
2961 	      if (validate_change (insn, loc,
2962 				   canon_reg (copy_rtx (best_elt->exp),
2963 					      NULL_RTX), 0))
2964 		return;
2965 	      else
2966 		best_elt->flag = 1;
2967 	    }
2968 	}
2969     }
2970 
2971   /* If the address is a binary operation with the first operand a register
2972      and the second a constant, do the same as above, but looking for
2973      equivalences of the register.  Then try to simplify before checking for
2974      the best address to use.  This catches a few cases:  First is when we
2975      have REG+const and the register is another REG+const.  We can often merge
2976      the constants and eliminate one insn and one register.  It may also be
2977      that a machine has a cheap REG+REG+const.  Finally, this improves the
2978      code on the Alpha for unaligned byte stores.  */
2979 
2980   if (flag_expensive_optimizations
2981       && ARITHMETIC_P (*loc)
2982       && REG_P (XEXP (*loc, 0)))
2983     {
2984       rtx op1 = XEXP (*loc, 1);
2985 
2986       do_not_record = 0;
2987       hash = HASH (XEXP (*loc, 0), Pmode);
2988       do_not_record = save_do_not_record;
2989       hash_arg_in_memory = save_hash_arg_in_memory;
2990 
2991       elt = lookup (XEXP (*loc, 0), hash, Pmode);
2992       if (elt == 0)
2993 	return;
2994 
2995       /* We need to find the best (under the criteria documented above) entry
2996 	 in the class that is valid.  We use the `flag' field to indicate
2997 	 choices that were invalid and iterate until we can't find a better
2998 	 one that hasn't already been tried.  */
2999 
3000       for (p = elt->first_same_value; p; p = p->next_same_value)
3001 	p->flag = 0;
3002 
3003       while (found_better)
3004 	{
3005 	  int best_addr_cost = address_cost (*loc, mode);
3006 	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
3007 	  struct table_elt *best_elt = elt;
3008 	  rtx best_rtx = *loc;
3009 	  int count;
3010 
3011 	  /* This is at worst case an O(n^2) algorithm, so limit our search
3012 	     to the first 32 elements on the list.  This avoids trouble
3013 	     compiling code with very long basic blocks that can easily
3014 	     call simplify_gen_binary so many times that we run out of
3015 	     memory.  */
3016 
3017 	  found_better = 0;
3018 	  for (p = elt->first_same_value, count = 0;
3019 	       p && count < 32;
3020 	       p = p->next_same_value, count++)
3021 	    if (! p->flag
3022 		&& (REG_P (p->exp)
3023 		    || (GET_CODE (p->exp) != EXPR_LIST
3024 			&& exp_equiv_p (p->exp, p->exp, 1, false))))
3025 
3026 	      {
3027 		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3028 					       p->exp, op1);
3029 		int new_cost;
3030 
3031 		/* Get the canonical version of the address so we can accept
3032 		   more.  */
3033 		new = canon_for_address (new);
3034 
3035 		new_cost = address_cost (new, mode);
3036 
3037 		if (new_cost < best_addr_cost
3038 		    || (new_cost == best_addr_cost
3039 			&& (COST (new) + 1) >> 1 > best_rtx_cost))
3040 		  {
3041 		    found_better = 1;
3042 		    best_addr_cost = new_cost;
3043 		    best_rtx_cost = (COST (new) + 1) >> 1;
3044 		    best_elt = p;
3045 		    best_rtx = new;
3046 		  }
3047 	      }
3048 
3049 	  if (found_better)
3050 	    {
3051 	      if (validate_change (insn, loc,
3052 				   canon_reg (copy_rtx (best_rtx),
3053 					      NULL_RTX), 0))
3054 		return;
3055 	      else
3056 		best_elt->flag = 1;
3057 	    }
3058 	}
3059     }
3060 }
3061 
3062 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3063    operation (EQ, NE, GT, etc.), follow it back through the hash table and
3064    what values are being compared.
3065 
3066    *PARG1 and *PARG2 are updated to contain the rtx representing the values
3067    actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3068    was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3069    compared to produce cc0.
3070 
3071    The return value is the comparison operator and is either the code of
3072    A or the code corresponding to the inverse of the comparison.  */
3073 
3074 static enum rtx_code
find_comparison_args(enum rtx_code code,rtx * parg1,rtx * parg2,enum machine_mode * pmode1,enum machine_mode * pmode2)3075 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3076 		      enum machine_mode *pmode1, enum machine_mode *pmode2)
3077 {
3078   rtx arg1, arg2;
3079 
3080   arg1 = *parg1, arg2 = *parg2;
3081 
3082   /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3083 
3084   while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3085     {
3086       /* Set nonzero when we find something of interest.  */
3087       rtx x = 0;
3088       int reverse_code = 0;
3089       struct table_elt *p = 0;
3090 
3091       /* If arg1 is a COMPARE, extract the comparison arguments from it.
3092 	 On machines with CC0, this is the only case that can occur, since
3093 	 fold_rtx will return the COMPARE or item being compared with zero
3094 	 when given CC0.  */
3095 
3096       if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3097 	x = arg1;
3098 
3099       /* If ARG1 is a comparison operator and CODE is testing for
3100 	 STORE_FLAG_VALUE, get the inner arguments.  */
3101 
3102       else if (COMPARISON_P (arg1))
3103 	{
3104 #ifdef FLOAT_STORE_FLAG_VALUE
3105 	  REAL_VALUE_TYPE fsfv;
3106 #endif
3107 
3108 	  if (code == NE
3109 	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3110 		  && code == LT && STORE_FLAG_VALUE == -1)
3111 #ifdef FLOAT_STORE_FLAG_VALUE
3112 	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3113 		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3114 		      REAL_VALUE_NEGATIVE (fsfv)))
3115 #endif
3116 	      )
3117 	    x = arg1;
3118 	  else if (code == EQ
3119 		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3120 		       && code == GE && STORE_FLAG_VALUE == -1)
3121 #ifdef FLOAT_STORE_FLAG_VALUE
3122 		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3123 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3124 			   REAL_VALUE_NEGATIVE (fsfv)))
3125 #endif
3126 		   )
3127 	    x = arg1, reverse_code = 1;
3128 	}
3129 
3130       /* ??? We could also check for
3131 
3132 	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3133 
3134 	 and related forms, but let's wait until we see them occurring.  */
3135 
3136       if (x == 0)
3137 	/* Look up ARG1 in the hash table and see if it has an equivalence
3138 	   that lets us see what is being compared.  */
3139 	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3140       if (p)
3141 	{
3142 	  p = p->first_same_value;
3143 
3144 	  /* If what we compare is already known to be constant, that is as
3145 	     good as it gets.
3146 	     We need to break the loop in this case, because otherwise we
3147 	     can have an infinite loop when looking at a reg that is known
3148 	     to be a constant which is the same as a comparison of a reg
3149 	     against zero which appears later in the insn stream, which in
3150 	     turn is constant and the same as the comparison of the first reg
3151 	     against zero...  */
3152 	  if (p->is_const)
3153 	    break;
3154 	}
3155 
3156       for (; p; p = p->next_same_value)
3157 	{
3158 	  enum machine_mode inner_mode = GET_MODE (p->exp);
3159 #ifdef FLOAT_STORE_FLAG_VALUE
3160 	  REAL_VALUE_TYPE fsfv;
3161 #endif
3162 
3163 	  /* If the entry isn't valid, skip it.  */
3164 	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3165 	    continue;
3166 
3167 	  if (GET_CODE (p->exp) == COMPARE
3168 	      /* Another possibility is that this machine has a compare insn
3169 		 that includes the comparison code.  In that case, ARG1 would
3170 		 be equivalent to a comparison operation that would set ARG1 to
3171 		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3172 		 ORIG_CODE is the actual comparison being done; if it is an EQ,
3173 		 we must reverse ORIG_CODE.  On machine with a negative value
3174 		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3175 	      || ((code == NE
3176 		   || (code == LT
3177 		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3178 		       && (GET_MODE_BITSIZE (inner_mode)
3179 			   <= HOST_BITS_PER_WIDE_INT)
3180 		       && (STORE_FLAG_VALUE
3181 			   & ((HOST_WIDE_INT) 1
3182 			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3183 #ifdef FLOAT_STORE_FLAG_VALUE
3184 		   || (code == LT
3185 		       && SCALAR_FLOAT_MODE_P (inner_mode)
3186 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3187 			   REAL_VALUE_NEGATIVE (fsfv)))
3188 #endif
3189 		   )
3190 		  && COMPARISON_P (p->exp)))
3191 	    {
3192 	      x = p->exp;
3193 	      break;
3194 	    }
3195 	  else if ((code == EQ
3196 		    || (code == GE
3197 			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3198 			&& (GET_MODE_BITSIZE (inner_mode)
3199 			    <= HOST_BITS_PER_WIDE_INT)
3200 			&& (STORE_FLAG_VALUE
3201 			    & ((HOST_WIDE_INT) 1
3202 			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3203 #ifdef FLOAT_STORE_FLAG_VALUE
3204 		    || (code == GE
3205 			&& SCALAR_FLOAT_MODE_P (inner_mode)
3206 			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3207 			    REAL_VALUE_NEGATIVE (fsfv)))
3208 #endif
3209 		    )
3210 		   && COMPARISON_P (p->exp))
3211 	    {
3212 	      reverse_code = 1;
3213 	      x = p->exp;
3214 	      break;
3215 	    }
3216 
3217 	  /* If this non-trapping address, e.g. fp + constant, the
3218 	     equivalent is a better operand since it may let us predict
3219 	     the value of the comparison.  */
3220 	  else if (!rtx_addr_can_trap_p (p->exp))
3221 	    {
3222 	      arg1 = p->exp;
3223 	      continue;
3224 	    }
3225 	}
3226 
3227       /* If we didn't find a useful equivalence for ARG1, we are done.
3228 	 Otherwise, set up for the next iteration.  */
3229       if (x == 0)
3230 	break;
3231 
3232       /* If we need to reverse the comparison, make sure that that is
3233 	 possible -- we can't necessarily infer the value of GE from LT
3234 	 with floating-point operands.  */
3235       if (reverse_code)
3236 	{
3237 	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3238 	  if (reversed == UNKNOWN)
3239 	    break;
3240 	  else
3241 	    code = reversed;
3242 	}
3243       else if (COMPARISON_P (x))
3244 	code = GET_CODE (x);
3245       arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3246     }
3247 
3248   /* Return our results.  Return the modes from before fold_rtx
3249      because fold_rtx might produce const_int, and then it's too late.  */
3250   *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3251   *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3252 
3253   return code;
3254 }
3255 
3256 /* Fold SUBREG.  */
3257 
3258 static rtx
fold_rtx_subreg(rtx x,rtx insn)3259 fold_rtx_subreg (rtx x, rtx insn)
3260 {
3261   enum machine_mode mode = GET_MODE (x);
3262   rtx folded_arg0;
3263   rtx const_arg0;
3264   rtx new;
3265 
3266   /* See if we previously assigned a constant value to this SUBREG.  */
3267   if ((new = lookup_as_function (x, CONST_INT)) != 0
3268       || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3269     return new;
3270 
3271   /* If this is a paradoxical SUBREG, we have no idea what value the
3272      extra bits would have.  However, if the operand is equivalent to
3273      a SUBREG whose operand is the same as our mode, and all the modes
3274      are within a word, we can just use the inner operand because
3275      these SUBREGs just say how to treat the register.
3276 
3277      Similarly if we find an integer constant.  */
3278 
3279   if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3280     {
3281       enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3282       struct table_elt *elt;
3283 
3284       if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3285 	  && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3286 	  && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3287 			    imode)) != 0)
3288 	for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3289 	  {
3290 	    if (CONSTANT_P (elt->exp)
3291 		&& GET_MODE (elt->exp) == VOIDmode)
3292 	      return elt->exp;
3293 
3294 	    if (GET_CODE (elt->exp) == SUBREG
3295 		&& GET_MODE (SUBREG_REG (elt->exp)) == mode
3296 		&& exp_equiv_p (elt->exp, elt->exp, 1, false))
3297 	      return copy_rtx (SUBREG_REG (elt->exp));
3298 	  }
3299 
3300       return x;
3301     }
3302 
3303   /* Fold SUBREG_REG.  If it changed, see if we can simplify the
3304      SUBREG.  We might be able to if the SUBREG is extracting a single
3305      word in an integral mode or extracting the low part.  */
3306 
3307   folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3308   const_arg0 = equiv_constant (folded_arg0);
3309   if (const_arg0)
3310     folded_arg0 = const_arg0;
3311 
3312   if (folded_arg0 != SUBREG_REG (x))
3313     {
3314       new = simplify_subreg (mode, folded_arg0,
3315 			     GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3316       if (new)
3317 	return new;
3318     }
3319 
3320   if (REG_P (folded_arg0)
3321       && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3322     {
3323       struct table_elt *elt;
3324 
3325       elt = lookup (folded_arg0,
3326 		    HASH (folded_arg0, GET_MODE (folded_arg0)),
3327 		    GET_MODE (folded_arg0));
3328 
3329       if (elt)
3330 	elt = elt->first_same_value;
3331 
3332       if (subreg_lowpart_p (x))
3333 	/* If this is a narrowing SUBREG and our operand is a REG, see
3334 	   if we can find an equivalence for REG that is an arithmetic
3335 	   operation in a wider mode where both operands are
3336 	   paradoxical SUBREGs from objects of our result mode.  In
3337 	   that case, we couldn-t report an equivalent value for that
3338 	   operation, since we don't know what the extra bits will be.
3339 	   But we can find an equivalence for this SUBREG by folding
3340 	   that operation in the narrow mode.  This allows us to fold
3341 	   arithmetic in narrow modes when the machine only supports
3342 	   word-sized arithmetic.
3343 
3344 	   Also look for a case where we have a SUBREG whose operand
3345 	   is the same as our result.  If both modes are smaller than
3346 	   a word, we are simply interpreting a register in different
3347 	   modes and we can use the inner value.  */
3348 
3349 	for (; elt; elt = elt->next_same_value)
3350 	  {
3351 	    enum rtx_code eltcode = GET_CODE (elt->exp);
3352 
3353 	    /* Just check for unary and binary operations.  */
3354 	    if (UNARY_P (elt->exp)
3355 		&& eltcode != SIGN_EXTEND
3356 		&& eltcode != ZERO_EXTEND
3357 		&& GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3358 		&& GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3359 		&& (GET_MODE_CLASS (mode)
3360 		    == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3361 	      {
3362 		rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3363 
3364 		if (!REG_P (op0) && ! CONSTANT_P (op0))
3365 		  op0 = fold_rtx (op0, NULL_RTX);
3366 
3367 		op0 = equiv_constant (op0);
3368 		if (op0)
3369 		  new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3370 						  op0, mode);
3371 	      }
3372 	    else if (ARITHMETIC_P (elt->exp)
3373 		     && eltcode != DIV && eltcode != MOD
3374 		     && eltcode != UDIV && eltcode != UMOD
3375 		     && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3376 		     && eltcode != ROTATE && eltcode != ROTATERT
3377 		     && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3378 			  && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3379 			      == mode))
3380 			 || CONSTANT_P (XEXP (elt->exp, 0)))
3381 		     && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3382 			  && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3383 			      == mode))
3384 			 || CONSTANT_P (XEXP (elt->exp, 1))))
3385 	      {
3386 		rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3387 		rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3388 
3389 		if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3390 		  op0 = fold_rtx (op0, NULL_RTX);
3391 
3392 		if (op0)
3393 		  op0 = equiv_constant (op0);
3394 
3395 		if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3396 		  op1 = fold_rtx (op1, NULL_RTX);
3397 
3398 		if (op1)
3399 		  op1 = equiv_constant (op1);
3400 
3401 		/* If we are looking for the low SImode part of
3402 		   (ashift:DI c (const_int 32)), it doesn't work to
3403 		   compute that in SImode, because a 32-bit shift in
3404 		   SImode is unpredictable.  We know the value is
3405 		   0.  */
3406 		if (op0 && op1
3407 		    && GET_CODE (elt->exp) == ASHIFT
3408 		    && GET_CODE (op1) == CONST_INT
3409 		    && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3410 		  {
3411 		    if (INTVAL (op1)
3412 			< GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3413 		      /* If the count fits in the inner mode's width,
3414 			 but exceeds the outer mode's width, the value
3415 			 will get truncated to 0 by the subreg.  */
3416 		      new = CONST0_RTX (mode);
3417 		    else
3418 		      /* If the count exceeds even the inner mode's width,
3419 			 don't fold this expression.  */
3420 		      new = 0;
3421 		  }
3422 		else if (op0 && op1)
3423 		  new = simplify_binary_operation (GET_CODE (elt->exp),
3424 						   mode, op0, op1);
3425 	      }
3426 
3427 	    else if (GET_CODE (elt->exp) == SUBREG
3428 		     && GET_MODE (SUBREG_REG (elt->exp)) == mode
3429 		     && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3430 			 <= UNITS_PER_WORD)
3431 		     && exp_equiv_p (elt->exp, elt->exp, 1, false))
3432 	      new = copy_rtx (SUBREG_REG (elt->exp));
3433 
3434 	    if (new)
3435 	      return new;
3436 	  }
3437       else
3438 	/* A SUBREG resulting from a zero extension may fold to zero
3439 	   if it extracts higher bits than the ZERO_EXTEND's source
3440 	   bits.  FIXME: if combine tried to, er, combine these
3441 	   instructions, this transformation may be moved to
3442 	   simplify_subreg.  */
3443 	for (; elt; elt = elt->next_same_value)
3444 	  {
3445 	    if (GET_CODE (elt->exp) == ZERO_EXTEND
3446 		&& subreg_lsb (x)
3447 		>= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3448 	      return CONST0_RTX (mode);
3449 	  }
3450     }
3451 
3452   return x;
3453 }
3454 
3455 /* Fold MEM.  Not to be called directly, see fold_rtx_mem instead.  */
3456 
3457 static rtx
fold_rtx_mem_1(rtx x,rtx insn)3458 fold_rtx_mem_1 (rtx x, rtx insn)
3459 {
3460   enum machine_mode mode = GET_MODE (x);
3461   rtx new;
3462 
3463   /* If we are not actually processing an insn, don't try to find the
3464      best address.  Not only don't we care, but we could modify the
3465      MEM in an invalid way since we have no insn to validate
3466      against.  */
3467   if (insn != 0)
3468     find_best_addr (insn, &XEXP (x, 0), mode);
3469 
3470   {
3471     /* Even if we don't fold in the insn itself, we can safely do so
3472        here, in hopes of getting a constant.  */
3473     rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3474     rtx base = 0;
3475     HOST_WIDE_INT offset = 0;
3476 
3477     if (REG_P (addr)
3478 	&& REGNO_QTY_VALID_P (REGNO (addr)))
3479       {
3480 	int addr_q = REG_QTY (REGNO (addr));
3481 	struct qty_table_elem *addr_ent = &qty_table[addr_q];
3482 
3483 	if (GET_MODE (addr) == addr_ent->mode
3484 	    && addr_ent->const_rtx != NULL_RTX)
3485 	  addr = addr_ent->const_rtx;
3486       }
3487 
3488     /* Call target hook to avoid the effects of -fpic etc....  */
3489     addr = targetm.delegitimize_address (addr);
3490 
3491     /* If address is constant, split it into a base and integer
3492        offset.  */
3493     if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3494       base = addr;
3495     else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3496 	     && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3497       {
3498 	base = XEXP (XEXP (addr, 0), 0);
3499 	offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3500       }
3501     else if (GET_CODE (addr) == LO_SUM
3502 	     && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3503       base = XEXP (addr, 1);
3504 
3505     /* If this is a constant pool reference, we can fold it into its
3506        constant to allow better value tracking.  */
3507     if (base && GET_CODE (base) == SYMBOL_REF
3508 	&& CONSTANT_POOL_ADDRESS_P (base))
3509       {
3510 	rtx constant = get_pool_constant (base);
3511 	enum machine_mode const_mode = get_pool_mode (base);
3512 	rtx new;
3513 
3514 	if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3515 	  {
3516 	    constant_pool_entries_cost = COST (constant);
3517 	    constant_pool_entries_regcost = approx_reg_cost (constant);
3518 	  }
3519 
3520 	/* If we are loading the full constant, we have an
3521 	   equivalence.  */
3522 	if (offset == 0 && mode == const_mode)
3523 	  return constant;
3524 
3525 	/* If this actually isn't a constant (weird!), we can't do
3526 	   anything.  Otherwise, handle the two most common cases:
3527 	   extracting a word from a multi-word constant, and
3528 	   extracting the low-order bits.  Other cases don't seem
3529 	   common enough to worry about.  */
3530 	if (! CONSTANT_P (constant))
3531 	  return x;
3532 
3533 	if (GET_MODE_CLASS (mode) == MODE_INT
3534 	    && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3535 	    && offset % UNITS_PER_WORD == 0
3536 	    && (new = operand_subword (constant,
3537 				       offset / UNITS_PER_WORD,
3538 				       0, const_mode)) != 0)
3539 	  return new;
3540 
3541 	if (((BYTES_BIG_ENDIAN
3542 	      && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3543 	     || (! BYTES_BIG_ENDIAN && offset == 0))
3544 	    && (new = gen_lowpart (mode, constant)) != 0)
3545 	  return new;
3546       }
3547 
3548     /* If this is a reference to a label at a known position in a jump
3549        table, we also know its value.  */
3550     if (base && GET_CODE (base) == LABEL_REF)
3551       {
3552 	rtx label = XEXP (base, 0);
3553 	rtx table_insn = NEXT_INSN (label);
3554 
3555 	if (table_insn && JUMP_P (table_insn)
3556 	    && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3557 	  {
3558 	    rtx table = PATTERN (table_insn);
3559 
3560 	    if (offset >= 0
3561 		&& (offset / GET_MODE_SIZE (GET_MODE (table))
3562 		    < XVECLEN (table, 0)))
3563 	      {
3564 		rtx label = XVECEXP
3565 		  (table, 0, offset / GET_MODE_SIZE (GET_MODE (table)));
3566 		rtx set;
3567 
3568 		/* If we have an insn that loads the label from the
3569 		   jumptable into a reg, we don't want to set the reg
3570 		   to the label, because this may cause a reference to
3571 		   the label to remain after the label is removed in
3572 		   some very obscure cases (PR middle-end/18628).  */
3573 		if (!insn)
3574 		  return label;
3575 
3576 		set = single_set (insn);
3577 
3578 		if (! set || SET_SRC (set) != x)
3579 		  return x;
3580 
3581 		/* If it's a jump, it's safe to reference the label.  */
3582 		if (SET_DEST (set) == pc_rtx)
3583 		  return label;
3584 
3585 		return x;
3586 	      }
3587 	  }
3588 	if (table_insn && JUMP_P (table_insn)
3589 	    && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3590 	  {
3591 	    rtx table = PATTERN (table_insn);
3592 
3593 	    if (offset >= 0
3594 		&& (offset / GET_MODE_SIZE (GET_MODE (table))
3595 		    < XVECLEN (table, 1)))
3596 	      {
3597 		offset /= GET_MODE_SIZE (GET_MODE (table));
3598 		new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3599 				     XEXP (table, 0));
3600 
3601 		if (GET_MODE (table) != Pmode)
3602 		  new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3603 
3604 		/* Indicate this is a constant.  This isn't a valid
3605 		   form of CONST, but it will only be used to fold the
3606 		   next insns and then discarded, so it should be
3607 		   safe.
3608 
3609 		   Note this expression must be explicitly discarded,
3610 		   by cse_insn, else it may end up in a REG_EQUAL note
3611 		   and "escape" to cause problems elsewhere.  */
3612 		return gen_rtx_CONST (GET_MODE (new), new);
3613 	      }
3614 	  }
3615       }
3616 
3617     return x;
3618   }
3619 }
3620 
3621 /* Fold MEM.  */
3622 
3623 static rtx
fold_rtx_mem(rtx x,rtx insn)3624 fold_rtx_mem (rtx x, rtx insn)
3625 {
3626   /* To avoid infinite oscillations between fold_rtx and fold_rtx_mem,
3627      refuse to allow recursion of the latter past n levels.  This can
3628      happen because fold_rtx_mem will try to fold the address of the
3629      memory reference it is passed, i.e. conceptually throwing away
3630      the MEM and reinjecting the bare address into fold_rtx.  As a
3631      result, patterns like
3632 
3633        set (reg1)
3634 	   (plus (reg)
3635 		 (mem (plus (reg2) (const_int))))
3636 
3637        set (reg2)
3638 	   (plus (reg)
3639 		 (mem (plus (reg1) (const_int))))
3640 
3641      will defeat any "first-order" short-circuit put in either
3642      function to prevent these infinite oscillations.
3643 
3644      The heuristics for determining n is as follows: since each time
3645      it is invoked fold_rtx_mem throws away a MEM, and since MEMs
3646      are generically not nested, we assume that each invocation of
3647      fold_rtx_mem corresponds to a new "top-level" operand, i.e.
3648      the source or the destination of a SET.  So fold_rtx_mem is
3649      bound to stop or cycle before n recursions, n being the number
3650      of expressions recorded in the hash table.  We also leave some
3651      play to account for the initial steps.  */
3652 
3653   static unsigned int depth;
3654   rtx ret;
3655 
3656   if (depth > 3 + table_size)
3657     return x;
3658 
3659   depth++;
3660   ret = fold_rtx_mem_1 (x, insn);
3661   depth--;
3662 
3663   return ret;
3664 }
3665 
3666 /* If X is a nontrivial arithmetic operation on an argument
3667    for which a constant value can be determined, return
3668    the result of operating on that value, as a constant.
3669    Otherwise, return X, possibly with one or more operands
3670    modified by recursive calls to this function.
3671 
3672    If X is a register whose contents are known, we do NOT
3673    return those contents here.  equiv_constant is called to
3674    perform that task.
3675 
3676    INSN is the insn that we may be modifying.  If it is 0, make a copy
3677    of X before modifying it.  */
3678 
3679 static rtx
fold_rtx(rtx x,rtx insn)3680 fold_rtx (rtx x, rtx insn)
3681 {
3682   enum rtx_code code;
3683   enum machine_mode mode;
3684   const char *fmt;
3685   int i;
3686   rtx new = 0;
3687   int copied = 0;
3688   int must_swap = 0;
3689 
3690   /* Folded equivalents of first two operands of X.  */
3691   rtx folded_arg0;
3692   rtx folded_arg1;
3693 
3694   /* Constant equivalents of first three operands of X;
3695      0 when no such equivalent is known.  */
3696   rtx const_arg0;
3697   rtx const_arg1;
3698   rtx const_arg2;
3699 
3700   /* The mode of the first operand of X.  We need this for sign and zero
3701      extends.  */
3702   enum machine_mode mode_arg0;
3703 
3704   if (x == 0)
3705     return x;
3706 
3707   mode = GET_MODE (x);
3708   code = GET_CODE (x);
3709   switch (code)
3710     {
3711     case CONST:
3712     case CONST_INT:
3713     case CONST_DOUBLE:
3714     case CONST_VECTOR:
3715     case SYMBOL_REF:
3716     case LABEL_REF:
3717     case REG:
3718     case PC:
3719       /* No use simplifying an EXPR_LIST
3720 	 since they are used only for lists of args
3721 	 in a function call's REG_EQUAL note.  */
3722     case EXPR_LIST:
3723       return x;
3724 
3725 #ifdef HAVE_cc0
3726     case CC0:
3727       return prev_insn_cc0;
3728 #endif
3729 
3730     case SUBREG:
3731       return fold_rtx_subreg (x, insn);
3732 
3733     case NOT:
3734     case NEG:
3735       /* If we have (NOT Y), see if Y is known to be (NOT Z).
3736 	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3737       new = lookup_as_function (XEXP (x, 0), code);
3738       if (new)
3739 	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3740       break;
3741 
3742     case MEM:
3743       return fold_rtx_mem (x, insn);
3744 
3745 #ifdef NO_FUNCTION_CSE
3746     case CALL:
3747       if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3748 	return x;
3749       break;
3750 #endif
3751 
3752     case ASM_OPERANDS:
3753       if (insn)
3754 	{
3755 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3756 	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3757 			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3758 	}
3759       break;
3760 
3761     default:
3762       break;
3763     }
3764 
3765   const_arg0 = 0;
3766   const_arg1 = 0;
3767   const_arg2 = 0;
3768   mode_arg0 = VOIDmode;
3769 
3770   /* Try folding our operands.
3771      Then see which ones have constant values known.  */
3772 
3773   fmt = GET_RTX_FORMAT (code);
3774   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3775     if (fmt[i] == 'e')
3776       {
3777 	rtx arg = XEXP (x, i);
3778 	rtx folded_arg = arg, const_arg = 0;
3779 	enum machine_mode mode_arg = GET_MODE (arg);
3780 	rtx cheap_arg, expensive_arg;
3781 	rtx replacements[2];
3782 	int j;
3783 	int old_cost = COST_IN (XEXP (x, i), code);
3784 
3785 	/* Most arguments are cheap, so handle them specially.  */
3786 	switch (GET_CODE (arg))
3787 	  {
3788 	  case REG:
3789 	    /* This is the same as calling equiv_constant; it is duplicated
3790 	       here for speed.  */
3791 	    if (REGNO_QTY_VALID_P (REGNO (arg)))
3792 	      {
3793 		int arg_q = REG_QTY (REGNO (arg));
3794 		struct qty_table_elem *arg_ent = &qty_table[arg_q];
3795 
3796 		if (arg_ent->const_rtx != NULL_RTX
3797 		    && !REG_P (arg_ent->const_rtx)
3798 		    && GET_CODE (arg_ent->const_rtx) != PLUS)
3799 		  const_arg
3800 		    = gen_lowpart (GET_MODE (arg),
3801 					       arg_ent->const_rtx);
3802 	      }
3803 	    break;
3804 
3805 	  case CONST:
3806 	  case CONST_INT:
3807 	  case SYMBOL_REF:
3808 	  case LABEL_REF:
3809 	  case CONST_DOUBLE:
3810 	  case CONST_VECTOR:
3811 	    const_arg = arg;
3812 	    break;
3813 
3814 #ifdef HAVE_cc0
3815 	  case CC0:
3816 	    folded_arg = prev_insn_cc0;
3817 	    mode_arg = prev_insn_cc0_mode;
3818 	    const_arg = equiv_constant (folded_arg);
3819 	    break;
3820 #endif
3821 
3822 	  default:
3823 	    folded_arg = fold_rtx (arg, insn);
3824 	    const_arg = equiv_constant (folded_arg);
3825 	  }
3826 
3827 	/* For the first three operands, see if the operand
3828 	   is constant or equivalent to a constant.  */
3829 	switch (i)
3830 	  {
3831 	  case 0:
3832 	    folded_arg0 = folded_arg;
3833 	    const_arg0 = const_arg;
3834 	    mode_arg0 = mode_arg;
3835 	    break;
3836 	  case 1:
3837 	    folded_arg1 = folded_arg;
3838 	    const_arg1 = const_arg;
3839 	    break;
3840 	  case 2:
3841 	    const_arg2 = const_arg;
3842 	    break;
3843 	  }
3844 
3845 	/* Pick the least expensive of the folded argument and an
3846 	   equivalent constant argument.  */
3847 	if (const_arg == 0 || const_arg == folded_arg
3848 	    || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3849 	  cheap_arg = folded_arg, expensive_arg = const_arg;
3850 	else
3851 	  cheap_arg = const_arg, expensive_arg = folded_arg;
3852 
3853 	/* Try to replace the operand with the cheapest of the two
3854 	   possibilities.  If it doesn't work and this is either of the first
3855 	   two operands of a commutative operation, try swapping them.
3856 	   If THAT fails, try the more expensive, provided it is cheaper
3857 	   than what is already there.  */
3858 
3859 	if (cheap_arg == XEXP (x, i))
3860 	  continue;
3861 
3862 	if (insn == 0 && ! copied)
3863 	  {
3864 	    x = copy_rtx (x);
3865 	    copied = 1;
3866 	  }
3867 
3868 	/* Order the replacements from cheapest to most expensive.  */
3869 	replacements[0] = cheap_arg;
3870 	replacements[1] = expensive_arg;
3871 
3872 	for (j = 0; j < 2 && replacements[j]; j++)
3873 	  {
3874 	    int new_cost = COST_IN (replacements[j], code);
3875 
3876 	    /* Stop if what existed before was cheaper.  Prefer constants
3877 	       in the case of a tie.  */
3878 	    if (new_cost > old_cost
3879 		|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3880 	      break;
3881 
3882 	    /* It's not safe to substitute the operand of a conversion
3883 	       operator with a constant, as the conversion's identity
3884 	       depends upon the mode of its operand.  This optimization
3885 	       is handled by the call to simplify_unary_operation.  */
3886 	    if (GET_RTX_CLASS (code) == RTX_UNARY
3887 		&& GET_MODE (replacements[j]) != mode_arg0
3888 		&& (code == ZERO_EXTEND
3889 		    || code == SIGN_EXTEND
3890 		    || code == TRUNCATE
3891 		    || code == FLOAT_TRUNCATE
3892 		    || code == FLOAT_EXTEND
3893 		    || code == FLOAT
3894 		    || code == FIX
3895 		    || code == UNSIGNED_FLOAT
3896 		    || code == UNSIGNED_FIX))
3897 	      continue;
3898 
3899 	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3900 	      break;
3901 
3902 	    if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3903 		|| GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3904 	      {
3905 		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3906 		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3907 
3908 		if (apply_change_group ())
3909 		  {
3910 		    /* Swap them back to be invalid so that this loop can
3911 		       continue and flag them to be swapped back later.  */
3912 		    rtx tem;
3913 
3914 		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3915 				       XEXP (x, 1) = tem;
3916 		    must_swap = 1;
3917 		    break;
3918 		  }
3919 	      }
3920 	  }
3921       }
3922 
3923     else
3924       {
3925 	if (fmt[i] == 'E')
3926 	  /* Don't try to fold inside of a vector of expressions.
3927 	     Doing nothing is harmless.  */
3928 	  {;}
3929       }
3930 
3931   /* If a commutative operation, place a constant integer as the second
3932      operand unless the first operand is also a constant integer.  Otherwise,
3933      place any constant second unless the first operand is also a constant.  */
3934 
3935   if (COMMUTATIVE_P (x))
3936     {
3937       if (must_swap
3938 	  || swap_commutative_operands_p (const_arg0 ? const_arg0
3939 						     : XEXP (x, 0),
3940 					  const_arg1 ? const_arg1
3941 						     : XEXP (x, 1)))
3942 	{
3943 	  rtx tem = XEXP (x, 0);
3944 
3945 	  if (insn == 0 && ! copied)
3946 	    {
3947 	      x = copy_rtx (x);
3948 	      copied = 1;
3949 	    }
3950 
3951 	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3952 	  validate_change (insn, &XEXP (x, 1), tem, 1);
3953 	  if (apply_change_group ())
3954 	    {
3955 	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3956 	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3957 	    }
3958 	}
3959     }
3960 
3961   /* If X is an arithmetic operation, see if we can simplify it.  */
3962 
3963   switch (GET_RTX_CLASS (code))
3964     {
3965     case RTX_UNARY:
3966       {
3967 	int is_const = 0;
3968 
3969 	/* We can't simplify extension ops unless we know the
3970 	   original mode.  */
3971 	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3972 	    && mode_arg0 == VOIDmode)
3973 	  break;
3974 
3975 	/* If we had a CONST, strip it off and put it back later if we
3976 	   fold.  */
3977 	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3978 	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3979 
3980 	new = simplify_unary_operation (code, mode,
3981 					const_arg0 ? const_arg0 : folded_arg0,
3982 					mode_arg0);
3983 	/* NEG of PLUS could be converted into MINUS, but that causes
3984 	   expressions of the form
3985 	   (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3986 	   which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3987 	   FIXME: those ports should be fixed.  */
3988 	if (new != 0 && is_const
3989 	    && GET_CODE (new) == PLUS
3990 	    && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3991 		|| GET_CODE (XEXP (new, 0)) == LABEL_REF)
3992 	    && GET_CODE (XEXP (new, 1)) == CONST_INT)
3993 	  new = gen_rtx_CONST (mode, new);
3994       }
3995       break;
3996 
3997     case RTX_COMPARE:
3998     case RTX_COMM_COMPARE:
3999       /* See what items are actually being compared and set FOLDED_ARG[01]
4000 	 to those values and CODE to the actual comparison code.  If any are
4001 	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
4002 	 do anything if both operands are already known to be constant.  */
4003 
4004       /* ??? Vector mode comparisons are not supported yet.  */
4005       if (VECTOR_MODE_P (mode))
4006 	break;
4007 
4008       if (const_arg0 == 0 || const_arg1 == 0)
4009 	{
4010 	  struct table_elt *p0, *p1;
4011 	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4012 	  enum machine_mode mode_arg1;
4013 
4014 #ifdef FLOAT_STORE_FLAG_VALUE
4015 	  if (SCALAR_FLOAT_MODE_P (mode))
4016 	    {
4017 	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4018 			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4019 	      false_rtx = CONST0_RTX (mode);
4020 	    }
4021 #endif
4022 
4023 	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
4024 				       &mode_arg0, &mode_arg1);
4025 
4026 	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
4027 	     what kinds of things are being compared, so we can't do
4028 	     anything with this comparison.  */
4029 
4030 	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
4031 	    break;
4032 
4033 	  const_arg0 = equiv_constant (folded_arg0);
4034 	  const_arg1 = equiv_constant (folded_arg1);
4035 
4036 	  /* If we do not now have two constants being compared, see
4037 	     if we can nevertheless deduce some things about the
4038 	     comparison.  */
4039 	  if (const_arg0 == 0 || const_arg1 == 0)
4040 	    {
4041 	      if (const_arg1 != NULL)
4042 		{
4043 		  rtx cheapest_simplification;
4044 		  int cheapest_cost;
4045 		  rtx simp_result;
4046 		  struct table_elt *p;
4047 
4048 		  /* See if we can find an equivalent of folded_arg0
4049 		     that gets us a cheaper expression, possibly a
4050 		     constant through simplifications.  */
4051 		  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
4052 			      mode_arg0);
4053 
4054 		  if (p != NULL)
4055 		    {
4056 		      cheapest_simplification = x;
4057 		      cheapest_cost = COST (x);
4058 
4059 		      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
4060 			{
4061 			  int cost;
4062 
4063 			  /* If the entry isn't valid, skip it.  */
4064 			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
4065 			    continue;
4066 
4067 			  /* Try to simplify using this equivalence.  */
4068 			  simp_result
4069 			    = simplify_relational_operation (code, mode,
4070 							     mode_arg0,
4071 							     p->exp,
4072 							     const_arg1);
4073 
4074 			  if (simp_result == NULL)
4075 			    continue;
4076 
4077 			  cost = COST (simp_result);
4078 			  if (cost < cheapest_cost)
4079 			    {
4080 			      cheapest_cost = cost;
4081 			      cheapest_simplification = simp_result;
4082 			    }
4083 			}
4084 
4085 		      /* If we have a cheaper expression now, use that
4086 			 and try folding it further, from the top.  */
4087 		      if (cheapest_simplification != x)
4088 			return fold_rtx (cheapest_simplification, insn);
4089 		    }
4090 		}
4091 
4092 	      /* Some addresses are known to be nonzero.  We don't know
4093 		 their sign, but equality comparisons are known.  */
4094 	      if (const_arg1 == const0_rtx
4095 		  && nonzero_address_p (folded_arg0))
4096 		{
4097 		  if (code == EQ)
4098 		    return false_rtx;
4099 		  else if (code == NE)
4100 		    return true_rtx;
4101 		}
4102 
4103 	      /* See if the two operands are the same.  */
4104 
4105 	      if (folded_arg0 == folded_arg1
4106 		  || (REG_P (folded_arg0)
4107 		      && REG_P (folded_arg1)
4108 		      && (REG_QTY (REGNO (folded_arg0))
4109 			  == REG_QTY (REGNO (folded_arg1))))
4110 		  || ((p0 = lookup (folded_arg0,
4111 				    SAFE_HASH (folded_arg0, mode_arg0),
4112 				    mode_arg0))
4113 		      && (p1 = lookup (folded_arg1,
4114 				       SAFE_HASH (folded_arg1, mode_arg0),
4115 				       mode_arg0))
4116 		      && p0->first_same_value == p1->first_same_value))
4117 		{
4118 		  /* Sadly two equal NaNs are not equivalent.  */
4119 		  if (!HONOR_NANS (mode_arg0))
4120 		    return ((code == EQ || code == LE || code == GE
4121 			     || code == LEU || code == GEU || code == UNEQ
4122 			     || code == UNLE || code == UNGE
4123 			     || code == ORDERED)
4124 			    ? true_rtx : false_rtx);
4125 		  /* Take care for the FP compares we can resolve.  */
4126 		  if (code == UNEQ || code == UNLE || code == UNGE)
4127 		    return true_rtx;
4128 		  if (code == LTGT || code == LT || code == GT)
4129 		    return false_rtx;
4130 		}
4131 
4132 	      /* If FOLDED_ARG0 is a register, see if the comparison we are
4133 		 doing now is either the same as we did before or the reverse
4134 		 (we only check the reverse if not floating-point).  */
4135 	      else if (REG_P (folded_arg0))
4136 		{
4137 		  int qty = REG_QTY (REGNO (folded_arg0));
4138 
4139 		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4140 		    {
4141 		      struct qty_table_elem *ent = &qty_table[qty];
4142 
4143 		      if ((comparison_dominates_p (ent->comparison_code, code)
4144 			   || (! FLOAT_MODE_P (mode_arg0)
4145 			       && comparison_dominates_p (ent->comparison_code,
4146 						          reverse_condition (code))))
4147 			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
4148 			      || (const_arg1
4149 				  && rtx_equal_p (ent->comparison_const,
4150 						  const_arg1))
4151 			      || (REG_P (folded_arg1)
4152 				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4153 			return (comparison_dominates_p (ent->comparison_code, code)
4154 				? true_rtx : false_rtx);
4155 		    }
4156 		}
4157 	    }
4158 	}
4159 
4160       /* If we are comparing against zero, see if the first operand is
4161 	 equivalent to an IOR with a constant.  If so, we may be able to
4162 	 determine the result of this comparison.  */
4163 
4164       if (const_arg1 == const0_rtx)
4165 	{
4166 	  rtx y = lookup_as_function (folded_arg0, IOR);
4167 	  rtx inner_const;
4168 
4169 	  if (y != 0
4170 	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4171 	      && GET_CODE (inner_const) == CONST_INT
4172 	      && INTVAL (inner_const) != 0)
4173 	    {
4174 	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4175 	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4176 			      && (INTVAL (inner_const)
4177 				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4178 	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4179 
4180 #ifdef FLOAT_STORE_FLAG_VALUE
4181 	      if (SCALAR_FLOAT_MODE_P (mode))
4182 		{
4183 		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4184 			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4185 		  false_rtx = CONST0_RTX (mode);
4186 		}
4187 #endif
4188 
4189 	      switch (code)
4190 		{
4191 		case EQ:
4192 		  return false_rtx;
4193 		case NE:
4194 		  return true_rtx;
4195 		case LT:  case LE:
4196 		  if (has_sign)
4197 		    return true_rtx;
4198 		  break;
4199 		case GT:  case GE:
4200 		  if (has_sign)
4201 		    return false_rtx;
4202 		  break;
4203 		default:
4204 		  break;
4205 		}
4206 	    }
4207 	}
4208 
4209       {
4210 	rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4211 	rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4212         new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4213       }
4214       break;
4215 
4216     case RTX_BIN_ARITH:
4217     case RTX_COMM_ARITH:
4218       switch (code)
4219 	{
4220 	case PLUS:
4221 	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
4222 	     with that LABEL_REF as its second operand.  If so, the result is
4223 	     the first operand of that MINUS.  This handles switches with an
4224 	     ADDR_DIFF_VEC table.  */
4225 	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4226 	    {
4227 	      rtx y
4228 		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
4229 		: lookup_as_function (folded_arg0, MINUS);
4230 
4231 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4232 		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4233 		return XEXP (y, 0);
4234 
4235 	      /* Now try for a CONST of a MINUS like the above.  */
4236 	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4237 			: lookup_as_function (folded_arg0, CONST))) != 0
4238 		  && GET_CODE (XEXP (y, 0)) == MINUS
4239 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4240 		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4241 		return XEXP (XEXP (y, 0), 0);
4242 	    }
4243 
4244 	  /* Likewise if the operands are in the other order.  */
4245 	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4246 	    {
4247 	      rtx y
4248 		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
4249 		: lookup_as_function (folded_arg1, MINUS);
4250 
4251 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4252 		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4253 		return XEXP (y, 0);
4254 
4255 	      /* Now try for a CONST of a MINUS like the above.  */
4256 	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4257 			: lookup_as_function (folded_arg1, CONST))) != 0
4258 		  && GET_CODE (XEXP (y, 0)) == MINUS
4259 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4260 		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4261 		return XEXP (XEXP (y, 0), 0);
4262 	    }
4263 
4264 	  /* If second operand is a register equivalent to a negative
4265 	     CONST_INT, see if we can find a register equivalent to the
4266 	     positive constant.  Make a MINUS if so.  Don't do this for
4267 	     a non-negative constant since we might then alternate between
4268 	     choosing positive and negative constants.  Having the positive
4269 	     constant previously-used is the more common case.  Be sure
4270 	     the resulting constant is non-negative; if const_arg1 were
4271 	     the smallest negative number this would overflow: depending
4272 	     on the mode, this would either just be the same value (and
4273 	     hence not save anything) or be incorrect.  */
4274 	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4275 	      && INTVAL (const_arg1) < 0
4276 	      /* This used to test
4277 
4278 	         -INTVAL (const_arg1) >= 0
4279 
4280 		 But The Sun V5.0 compilers mis-compiled that test.  So
4281 		 instead we test for the problematic value in a more direct
4282 		 manner and hope the Sun compilers get it correct.  */
4283 	      && INTVAL (const_arg1) !=
4284 	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4285 	      && REG_P (folded_arg1))
4286 	    {
4287 	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
4288 	      struct table_elt *p
4289 		= lookup (new_const, SAFE_HASH (new_const, mode), mode);
4290 
4291 	      if (p)
4292 		for (p = p->first_same_value; p; p = p->next_same_value)
4293 		  if (REG_P (p->exp))
4294 		    return simplify_gen_binary (MINUS, mode, folded_arg0,
4295 						canon_reg (p->exp, NULL_RTX));
4296 	    }
4297 	  goto from_plus;
4298 
4299 	case MINUS:
4300 	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4301 	     If so, produce (PLUS Z C2-C).  */
4302 	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4303 	    {
4304 	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4305 	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4306 		return fold_rtx (plus_constant (copy_rtx (y),
4307 						-INTVAL (const_arg1)),
4308 				 NULL_RTX);
4309 	    }
4310 
4311 	  /* Fall through.  */
4312 
4313 	from_plus:
4314 	case SMIN:    case SMAX:      case UMIN:    case UMAX:
4315 	case IOR:     case AND:       case XOR:
4316 	case MULT:
4317 	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4318 	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4319 	     is known to be of similar form, we may be able to replace the
4320 	     operation with a combined operation.  This may eliminate the
4321 	     intermediate operation if every use is simplified in this way.
4322 	     Note that the similar optimization done by combine.c only works
4323 	     if the intermediate operation's result has only one reference.  */
4324 
4325 	  if (REG_P (folded_arg0)
4326 	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4327 	    {
4328 	      int is_shift
4329 		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4330 	      rtx y, inner_const, new_const;
4331 	      enum rtx_code associate_code;
4332 
4333 	      if (is_shift
4334 		  && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
4335 		      || INTVAL (const_arg1) < 0))
4336 		{
4337 		  if (SHIFT_COUNT_TRUNCATED)
4338 		    const_arg1 = GEN_INT (INTVAL (const_arg1)
4339 					  & (GET_MODE_BITSIZE (mode) - 1));
4340 		  else
4341 		    break;
4342 		}
4343 
4344 	      y = lookup_as_function (folded_arg0, code);
4345 	      if (y == 0)
4346 		break;
4347 
4348 	      /* If we have compiled a statement like
4349 		 "if (x == (x & mask1))", and now are looking at
4350 		 "x & mask2", we will have a case where the first operand
4351 		 of Y is the same as our first operand.  Unless we detect
4352 		 this case, an infinite loop will result.  */
4353 	      if (XEXP (y, 0) == folded_arg0)
4354 		break;
4355 
4356 	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
4357 	      if (!inner_const || GET_CODE (inner_const) != CONST_INT)
4358 		break;
4359 
4360 	      /* Don't associate these operations if they are a PLUS with the
4361 		 same constant and it is a power of two.  These might be doable
4362 		 with a pre- or post-increment.  Similarly for two subtracts of
4363 		 identical powers of two with post decrement.  */
4364 
4365 	      if (code == PLUS && const_arg1 == inner_const
4366 		  && ((HAVE_PRE_INCREMENT
4367 			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4368 		      || (HAVE_POST_INCREMENT
4369 			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4370 		      || (HAVE_PRE_DECREMENT
4371 			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
4372 		      || (HAVE_POST_DECREMENT
4373 			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4374 		break;
4375 
4376 	      if (is_shift
4377 		  && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
4378 		      || INTVAL (inner_const) < 0))
4379 		{
4380 		  if (SHIFT_COUNT_TRUNCATED)
4381 		    inner_const = GEN_INT (INTVAL (inner_const)
4382 					   & (GET_MODE_BITSIZE (mode) - 1));
4383 		  else
4384 		    break;
4385 		}
4386 
4387 	      /* Compute the code used to compose the constants.  For example,
4388 		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
4389 
4390 	      associate_code = (is_shift || code == MINUS ? PLUS : code);
4391 
4392 	      new_const = simplify_binary_operation (associate_code, mode,
4393 						     const_arg1, inner_const);
4394 
4395 	      if (new_const == 0)
4396 		break;
4397 
4398 	      /* If we are associating shift operations, don't let this
4399 		 produce a shift of the size of the object or larger.
4400 		 This could occur when we follow a sign-extend by a right
4401 		 shift on a machine that does a sign-extend as a pair
4402 		 of shifts.  */
4403 
4404 	      if (is_shift
4405 		  && GET_CODE (new_const) == CONST_INT
4406 		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4407 		{
4408 		  /* As an exception, we can turn an ASHIFTRT of this
4409 		     form into a shift of the number of bits - 1.  */
4410 		  if (code == ASHIFTRT)
4411 		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4412 		  else if (!side_effects_p (XEXP (y, 0)))
4413 		    return CONST0_RTX (mode);
4414 		  else
4415 		    break;
4416 		}
4417 
4418 	      y = copy_rtx (XEXP (y, 0));
4419 
4420 	      /* If Y contains our first operand (the most common way this
4421 		 can happen is if Y is a MEM), we would do into an infinite
4422 		 loop if we tried to fold it.  So don't in that case.  */
4423 
4424 	      if (! reg_mentioned_p (folded_arg0, y))
4425 		y = fold_rtx (y, insn);
4426 
4427 	      return simplify_gen_binary (code, mode, y, new_const);
4428 	    }
4429 	  break;
4430 
4431 	case DIV:       case UDIV:
4432 	  /* ??? The associative optimization performed immediately above is
4433 	     also possible for DIV and UDIV using associate_code of MULT.
4434 	     However, we would need extra code to verify that the
4435 	     multiplication does not overflow, that is, there is no overflow
4436 	     in the calculation of new_const.  */
4437 	  break;
4438 
4439 	default:
4440 	  break;
4441 	}
4442 
4443       new = simplify_binary_operation (code, mode,
4444 				       const_arg0 ? const_arg0 : folded_arg0,
4445 				       const_arg1 ? const_arg1 : folded_arg1);
4446       break;
4447 
4448     case RTX_OBJ:
4449       /* (lo_sum (high X) X) is simply X.  */
4450       if (code == LO_SUM && const_arg0 != 0
4451 	  && GET_CODE (const_arg0) == HIGH
4452 	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4453 	return const_arg1;
4454       break;
4455 
4456     case RTX_TERNARY:
4457     case RTX_BITFIELD_OPS:
4458       new = simplify_ternary_operation (code, mode, mode_arg0,
4459 					const_arg0 ? const_arg0 : folded_arg0,
4460 					const_arg1 ? const_arg1 : folded_arg1,
4461 					const_arg2 ? const_arg2 : XEXP (x, 2));
4462       break;
4463 
4464     default:
4465       break;
4466     }
4467 
4468   return new ? new : x;
4469 }
4470 
4471 /* Return a constant value currently equivalent to X.
4472    Return 0 if we don't know one.  */
4473 
4474 static rtx
equiv_constant(rtx x)4475 equiv_constant (rtx x)
4476 {
4477   if (REG_P (x)
4478       && REGNO_QTY_VALID_P (REGNO (x)))
4479     {
4480       int x_q = REG_QTY (REGNO (x));
4481       struct qty_table_elem *x_ent = &qty_table[x_q];
4482 
4483       if (x_ent->const_rtx)
4484 	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4485     }
4486 
4487   if (x == 0 || CONSTANT_P (x))
4488     return x;
4489 
4490   /* If X is a MEM, try to fold it outside the context of any insn to see if
4491      it might be equivalent to a constant.  That handles the case where it
4492      is a constant-pool reference.  Then try to look it up in the hash table
4493      in case it is something whose value we have seen before.  */
4494 
4495   if (MEM_P (x))
4496     {
4497       struct table_elt *elt;
4498 
4499       x = fold_rtx (x, NULL_RTX);
4500       if (CONSTANT_P (x))
4501 	return x;
4502 
4503       elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4504       if (elt == 0)
4505 	return 0;
4506 
4507       for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4508 	if (elt->is_const && CONSTANT_P (elt->exp))
4509 	  return elt->exp;
4510     }
4511 
4512   return 0;
4513 }
4514 
4515 /* Given INSN, a jump insn, PATH_TAKEN indicates if we are following the "taken"
4516    branch.  It will be zero if not.
4517 
4518    In certain cases, this can cause us to add an equivalence.  For example,
4519    if we are following the taken case of
4520 	if (i == 2)
4521    we can add the fact that `i' and '2' are now equivalent.
4522 
4523    In any case, we can record that this comparison was passed.  If the same
4524    comparison is seen later, we will know its value.  */
4525 
4526 static void
record_jump_equiv(rtx insn,int taken)4527 record_jump_equiv (rtx insn, int taken)
4528 {
4529   int cond_known_true;
4530   rtx op0, op1;
4531   rtx set;
4532   enum machine_mode mode, mode0, mode1;
4533   int reversed_nonequality = 0;
4534   enum rtx_code code;
4535 
4536   /* Ensure this is the right kind of insn.  */
4537   if (! any_condjump_p (insn))
4538     return;
4539   set = pc_set (insn);
4540 
4541   /* See if this jump condition is known true or false.  */
4542   if (taken)
4543     cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4544   else
4545     cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4546 
4547   /* Get the type of comparison being done and the operands being compared.
4548      If we had to reverse a non-equality condition, record that fact so we
4549      know that it isn't valid for floating-point.  */
4550   code = GET_CODE (XEXP (SET_SRC (set), 0));
4551   op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4552   op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4553 
4554   code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4555 
4556   /* If the mode is a MODE_CC mode, we don't know what kinds of things
4557      are being compared, so we can't do anything with this
4558      comparison.  */
4559 
4560   if (GET_MODE_CLASS (mode0) == MODE_CC)
4561     return;
4562 
4563   if (! cond_known_true)
4564     {
4565       code = reversed_comparison_code_parts (code, op0, op1, insn);
4566 
4567       /* Don't remember if we can't find the inverse.  */
4568       if (code == UNKNOWN)
4569 	return;
4570     }
4571 
4572   /* The mode is the mode of the non-constant.  */
4573   mode = mode0;
4574   if (mode1 != VOIDmode)
4575     mode = mode1;
4576 
4577   record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4578 }
4579 
4580 /* Yet another form of subreg creation.  In this case, we want something in
4581    MODE, and we should assume OP has MODE iff it is naturally modeless.  */
4582 
4583 static rtx
record_jump_cond_subreg(enum machine_mode mode,rtx op)4584 record_jump_cond_subreg (enum machine_mode mode, rtx op)
4585 {
4586   enum machine_mode op_mode = GET_MODE (op);
4587   if (op_mode == mode || op_mode == VOIDmode)
4588     return op;
4589   return lowpart_subreg (mode, op, op_mode);
4590 }
4591 
4592 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4593    REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4594    Make any useful entries we can with that information.  Called from
4595    above function and called recursively.  */
4596 
4597 static void
record_jump_cond(enum rtx_code code,enum machine_mode mode,rtx op0,rtx op1,int reversed_nonequality)4598 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4599 		  rtx op1, int reversed_nonequality)
4600 {
4601   unsigned op0_hash, op1_hash;
4602   int op0_in_memory, op1_in_memory;
4603   struct table_elt *op0_elt, *op1_elt;
4604 
4605   /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4606      we know that they are also equal in the smaller mode (this is also
4607      true for all smaller modes whether or not there is a SUBREG, but
4608      is not worth testing for with no SUBREG).  */
4609 
4610   /* Note that GET_MODE (op0) may not equal MODE.  */
4611   if (code == EQ && GET_CODE (op0) == SUBREG
4612       && (GET_MODE_SIZE (GET_MODE (op0))
4613 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4614     {
4615       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4616       rtx tem = record_jump_cond_subreg (inner_mode, op1);
4617       if (tem)
4618 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4619 			  reversed_nonequality);
4620     }
4621 
4622   if (code == EQ && GET_CODE (op1) == SUBREG
4623       && (GET_MODE_SIZE (GET_MODE (op1))
4624 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4625     {
4626       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4627       rtx tem = record_jump_cond_subreg (inner_mode, op0);
4628       if (tem)
4629 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4630 			  reversed_nonequality);
4631     }
4632 
4633   /* Similarly, if this is an NE comparison, and either is a SUBREG
4634      making a smaller mode, we know the whole thing is also NE.  */
4635 
4636   /* Note that GET_MODE (op0) may not equal MODE;
4637      if we test MODE instead, we can get an infinite recursion
4638      alternating between two modes each wider than MODE.  */
4639 
4640   if (code == NE && GET_CODE (op0) == SUBREG
4641       && subreg_lowpart_p (op0)
4642       && (GET_MODE_SIZE (GET_MODE (op0))
4643 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4644     {
4645       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4646       rtx tem = record_jump_cond_subreg (inner_mode, op1);
4647       if (tem)
4648 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4649 			  reversed_nonequality);
4650     }
4651 
4652   if (code == NE && GET_CODE (op1) == SUBREG
4653       && subreg_lowpart_p (op1)
4654       && (GET_MODE_SIZE (GET_MODE (op1))
4655 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4656     {
4657       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4658       rtx tem = record_jump_cond_subreg (inner_mode, op0);
4659       if (tem)
4660 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4661 			  reversed_nonequality);
4662     }
4663 
4664   /* Hash both operands.  */
4665 
4666   do_not_record = 0;
4667   hash_arg_in_memory = 0;
4668   op0_hash = HASH (op0, mode);
4669   op0_in_memory = hash_arg_in_memory;
4670 
4671   if (do_not_record)
4672     return;
4673 
4674   do_not_record = 0;
4675   hash_arg_in_memory = 0;
4676   op1_hash = HASH (op1, mode);
4677   op1_in_memory = hash_arg_in_memory;
4678 
4679   if (do_not_record)
4680     return;
4681 
4682   /* Look up both operands.  */
4683   op0_elt = lookup (op0, op0_hash, mode);
4684   op1_elt = lookup (op1, op1_hash, mode);
4685 
4686   /* If both operands are already equivalent or if they are not in the
4687      table but are identical, do nothing.  */
4688   if ((op0_elt != 0 && op1_elt != 0
4689        && op0_elt->first_same_value == op1_elt->first_same_value)
4690       || op0 == op1 || rtx_equal_p (op0, op1))
4691     return;
4692 
4693   /* If we aren't setting two things equal all we can do is save this
4694      comparison.   Similarly if this is floating-point.  In the latter
4695      case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4696      If we record the equality, we might inadvertently delete code
4697      whose intent was to change -0 to +0.  */
4698 
4699   if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4700     {
4701       struct qty_table_elem *ent;
4702       int qty;
4703 
4704       /* If we reversed a floating-point comparison, if OP0 is not a
4705 	 register, or if OP1 is neither a register or constant, we can't
4706 	 do anything.  */
4707 
4708       if (!REG_P (op1))
4709 	op1 = equiv_constant (op1);
4710 
4711       if ((reversed_nonequality && FLOAT_MODE_P (mode))
4712 	  || !REG_P (op0) || op1 == 0)
4713 	return;
4714 
4715       /* Put OP0 in the hash table if it isn't already.  This gives it a
4716 	 new quantity number.  */
4717       if (op0_elt == 0)
4718 	{
4719 	  if (insert_regs (op0, NULL, 0))
4720 	    {
4721 	      rehash_using_reg (op0);
4722 	      op0_hash = HASH (op0, mode);
4723 
4724 	      /* If OP0 is contained in OP1, this changes its hash code
4725 		 as well.  Faster to rehash than to check, except
4726 		 for the simple case of a constant.  */
4727 	      if (! CONSTANT_P (op1))
4728 		op1_hash = HASH (op1,mode);
4729 	    }
4730 
4731 	  op0_elt = insert (op0, NULL, op0_hash, mode);
4732 	  op0_elt->in_memory = op0_in_memory;
4733 	}
4734 
4735       qty = REG_QTY (REGNO (op0));
4736       ent = &qty_table[qty];
4737 
4738       ent->comparison_code = code;
4739       if (REG_P (op1))
4740 	{
4741 	  /* Look it up again--in case op0 and op1 are the same.  */
4742 	  op1_elt = lookup (op1, op1_hash, mode);
4743 
4744 	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4745 	  if (op1_elt == 0)
4746 	    {
4747 	      if (insert_regs (op1, NULL, 0))
4748 		{
4749 		  rehash_using_reg (op1);
4750 		  op1_hash = HASH (op1, mode);
4751 		}
4752 
4753 	      op1_elt = insert (op1, NULL, op1_hash, mode);
4754 	      op1_elt->in_memory = op1_in_memory;
4755 	    }
4756 
4757 	  ent->comparison_const = NULL_RTX;
4758 	  ent->comparison_qty = REG_QTY (REGNO (op1));
4759 	}
4760       else
4761 	{
4762 	  ent->comparison_const = op1;
4763 	  ent->comparison_qty = -1;
4764 	}
4765 
4766       return;
4767     }
4768 
4769   /* If either side is still missing an equivalence, make it now,
4770      then merge the equivalences.  */
4771 
4772   if (op0_elt == 0)
4773     {
4774       if (insert_regs (op0, NULL, 0))
4775 	{
4776 	  rehash_using_reg (op0);
4777 	  op0_hash = HASH (op0, mode);
4778 	}
4779 
4780       op0_elt = insert (op0, NULL, op0_hash, mode);
4781       op0_elt->in_memory = op0_in_memory;
4782     }
4783 
4784   if (op1_elt == 0)
4785     {
4786       if (insert_regs (op1, NULL, 0))
4787 	{
4788 	  rehash_using_reg (op1);
4789 	  op1_hash = HASH (op1, mode);
4790 	}
4791 
4792       op1_elt = insert (op1, NULL, op1_hash, mode);
4793       op1_elt->in_memory = op1_in_memory;
4794     }
4795 
4796   merge_equiv_classes (op0_elt, op1_elt);
4797 }
4798 
4799 /* CSE processing for one instruction.
4800    First simplify sources and addresses of all assignments
4801    in the instruction, using previously-computed equivalents values.
4802    Then install the new sources and destinations in the table
4803    of available values.
4804 
4805    If LIBCALL_INSN is nonzero, don't record any equivalence made in
4806    the insn.  It means that INSN is inside libcall block.  In this
4807    case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
4808 
4809 /* Data on one SET contained in the instruction.  */
4810 
4811 struct set
4812 {
4813   /* The SET rtx itself.  */
4814   rtx rtl;
4815   /* The SET_SRC of the rtx (the original value, if it is changing).  */
4816   rtx src;
4817   /* The hash-table element for the SET_SRC of the SET.  */
4818   struct table_elt *src_elt;
4819   /* Hash value for the SET_SRC.  */
4820   unsigned src_hash;
4821   /* Hash value for the SET_DEST.  */
4822   unsigned dest_hash;
4823   /* The SET_DEST, with SUBREG, etc., stripped.  */
4824   rtx inner_dest;
4825   /* Nonzero if the SET_SRC is in memory.  */
4826   char src_in_memory;
4827   /* Nonzero if the SET_SRC contains something
4828      whose value cannot be predicted and understood.  */
4829   char src_volatile;
4830   /* Original machine mode, in case it becomes a CONST_INT.
4831      The size of this field should match the size of the mode
4832      field of struct rtx_def (see rtl.h).  */
4833   ENUM_BITFIELD(machine_mode) mode : 8;
4834   /* A constant equivalent for SET_SRC, if any.  */
4835   rtx src_const;
4836   /* Original SET_SRC value used for libcall notes.  */
4837   rtx orig_src;
4838   /* Hash value of constant equivalent for SET_SRC.  */
4839   unsigned src_const_hash;
4840   /* Table entry for constant equivalent for SET_SRC, if any.  */
4841   struct table_elt *src_const_elt;
4842   /* Table entry for the destination address.  */
4843   struct table_elt *dest_addr_elt;
4844 };
4845 
4846 static void
cse_insn(rtx insn,rtx libcall_insn)4847 cse_insn (rtx insn, rtx libcall_insn)
4848 {
4849   rtx x = PATTERN (insn);
4850   int i;
4851   rtx tem;
4852   int n_sets = 0;
4853 
4854 #ifdef HAVE_cc0
4855   /* Records what this insn does to set CC0.  */
4856   rtx this_insn_cc0 = 0;
4857   enum machine_mode this_insn_cc0_mode = VOIDmode;
4858 #endif
4859 
4860   rtx src_eqv = 0;
4861   struct table_elt *src_eqv_elt = 0;
4862   int src_eqv_volatile = 0;
4863   int src_eqv_in_memory = 0;
4864   unsigned src_eqv_hash = 0;
4865 
4866   struct set *sets = (struct set *) 0;
4867 
4868   this_insn = insn;
4869 
4870   /* Find all the SETs and CLOBBERs in this instruction.
4871      Record all the SETs in the array `set' and count them.
4872      Also determine whether there is a CLOBBER that invalidates
4873      all memory references, or all references at varying addresses.  */
4874 
4875   if (CALL_P (insn))
4876     {
4877       for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4878 	{
4879 	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4880 	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4881 	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4882 	}
4883     }
4884 
4885   if (GET_CODE (x) == SET)
4886     {
4887       sets = alloca (sizeof (struct set));
4888       sets[0].rtl = x;
4889 
4890       /* Ignore SETs that are unconditional jumps.
4891 	 They never need cse processing, so this does not hurt.
4892 	 The reason is not efficiency but rather
4893 	 so that we can test at the end for instructions
4894 	 that have been simplified to unconditional jumps
4895 	 and not be misled by unchanged instructions
4896 	 that were unconditional jumps to begin with.  */
4897       if (SET_DEST (x) == pc_rtx
4898 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4899 	;
4900 
4901       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4902 	 The hard function value register is used only once, to copy to
4903 	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4904 	 Ensure we invalidate the destination register.  On the 80386 no
4905 	 other code would invalidate it since it is a fixed_reg.
4906 	 We need not check the return of apply_change_group; see canon_reg.  */
4907 
4908       else if (GET_CODE (SET_SRC (x)) == CALL)
4909 	{
4910 	  canon_reg (SET_SRC (x), insn);
4911 	  apply_change_group ();
4912 	  fold_rtx (SET_SRC (x), insn);
4913 	  invalidate (SET_DEST (x), VOIDmode);
4914 	}
4915       else
4916 	n_sets = 1;
4917     }
4918   else if (GET_CODE (x) == PARALLEL)
4919     {
4920       int lim = XVECLEN (x, 0);
4921 
4922       sets = alloca (lim * sizeof (struct set));
4923 
4924       /* Find all regs explicitly clobbered in this insn,
4925 	 and ensure they are not replaced with any other regs
4926 	 elsewhere in this insn.
4927 	 When a reg that is clobbered is also used for input,
4928 	 we should presume that that is for a reason,
4929 	 and we should not substitute some other register
4930 	 which is not supposed to be clobbered.
4931 	 Therefore, this loop cannot be merged into the one below
4932 	 because a CALL may precede a CLOBBER and refer to the
4933 	 value clobbered.  We must not let a canonicalization do
4934 	 anything in that case.  */
4935       for (i = 0; i < lim; i++)
4936 	{
4937 	  rtx y = XVECEXP (x, 0, i);
4938 	  if (GET_CODE (y) == CLOBBER)
4939 	    {
4940 	      rtx clobbered = XEXP (y, 0);
4941 
4942 	      if (REG_P (clobbered)
4943 		  || GET_CODE (clobbered) == SUBREG)
4944 		invalidate (clobbered, VOIDmode);
4945 	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
4946 		       || GET_CODE (clobbered) == ZERO_EXTRACT)
4947 		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4948 	    }
4949 	}
4950 
4951       for (i = 0; i < lim; i++)
4952 	{
4953 	  rtx y = XVECEXP (x, 0, i);
4954 	  if (GET_CODE (y) == SET)
4955 	    {
4956 	      /* As above, we ignore unconditional jumps and call-insns and
4957 		 ignore the result of apply_change_group.  */
4958 	      if (GET_CODE (SET_SRC (y)) == CALL)
4959 		{
4960 		  canon_reg (SET_SRC (y), insn);
4961 		  apply_change_group ();
4962 		  fold_rtx (SET_SRC (y), insn);
4963 		  invalidate (SET_DEST (y), VOIDmode);
4964 		}
4965 	      else if (SET_DEST (y) == pc_rtx
4966 		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4967 		;
4968 	      else
4969 		sets[n_sets++].rtl = y;
4970 	    }
4971 	  else if (GET_CODE (y) == CLOBBER)
4972 	    {
4973 	      /* If we clobber memory, canon the address.
4974 		 This does nothing when a register is clobbered
4975 		 because we have already invalidated the reg.  */
4976 	      if (MEM_P (XEXP (y, 0)))
4977 		canon_reg (XEXP (y, 0), NULL_RTX);
4978 	    }
4979 	  else if (GET_CODE (y) == USE
4980 		   && ! (REG_P (XEXP (y, 0))
4981 			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4982 	    canon_reg (y, NULL_RTX);
4983 	  else if (GET_CODE (y) == CALL)
4984 	    {
4985 	      /* The result of apply_change_group can be ignored; see
4986 		 canon_reg.  */
4987 	      canon_reg (y, insn);
4988 	      apply_change_group ();
4989 	      fold_rtx (y, insn);
4990 	    }
4991 	}
4992     }
4993   else if (GET_CODE (x) == CLOBBER)
4994     {
4995       if (MEM_P (XEXP (x, 0)))
4996 	canon_reg (XEXP (x, 0), NULL_RTX);
4997     }
4998 
4999   /* Canonicalize a USE of a pseudo register or memory location.  */
5000   else if (GET_CODE (x) == USE
5001 	   && ! (REG_P (XEXP (x, 0))
5002 		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
5003     canon_reg (XEXP (x, 0), NULL_RTX);
5004   else if (GET_CODE (x) == CALL)
5005     {
5006       /* The result of apply_change_group can be ignored; see canon_reg.  */
5007       canon_reg (x, insn);
5008       apply_change_group ();
5009       fold_rtx (x, insn);
5010     }
5011 
5012   /* Store the equivalent value in SRC_EQV, if different, or if the DEST
5013      is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
5014      is handled specially for this case, and if it isn't set, then there will
5015      be no equivalence for the destination.  */
5016   if (n_sets == 1 && REG_NOTES (insn) != 0
5017       && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
5018       && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
5019 	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
5020     {
5021       src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
5022       XEXP (tem, 0) = src_eqv;
5023     }
5024 
5025   /* Canonicalize sources and addresses of destinations.
5026      We do this in a separate pass to avoid problems when a MATCH_DUP is
5027      present in the insn pattern.  In that case, we want to ensure that
5028      we don't break the duplicate nature of the pattern.  So we will replace
5029      both operands at the same time.  Otherwise, we would fail to find an
5030      equivalent substitution in the loop calling validate_change below.
5031 
5032      We used to suppress canonicalization of DEST if it appears in SRC,
5033      but we don't do this any more.  */
5034 
5035   for (i = 0; i < n_sets; i++)
5036     {
5037       rtx dest = SET_DEST (sets[i].rtl);
5038       rtx src = SET_SRC (sets[i].rtl);
5039       rtx new = canon_reg (src, insn);
5040 
5041       sets[i].orig_src = src;
5042       validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5043 
5044       if (GET_CODE (dest) == ZERO_EXTRACT)
5045 	{
5046 	  validate_change (insn, &XEXP (dest, 1),
5047 			   canon_reg (XEXP (dest, 1), insn), 1);
5048 	  validate_change (insn, &XEXP (dest, 2),
5049 			   canon_reg (XEXP (dest, 2), insn), 1);
5050 	}
5051 
5052       while (GET_CODE (dest) == SUBREG
5053 	     || GET_CODE (dest) == ZERO_EXTRACT
5054 	     || GET_CODE (dest) == STRICT_LOW_PART)
5055 	dest = XEXP (dest, 0);
5056 
5057       if (MEM_P (dest))
5058 	canon_reg (dest, insn);
5059     }
5060 
5061   /* Now that we have done all the replacements, we can apply the change
5062      group and see if they all work.  Note that this will cause some
5063      canonicalizations that would have worked individually not to be applied
5064      because some other canonicalization didn't work, but this should not
5065      occur often.
5066 
5067      The result of apply_change_group can be ignored; see canon_reg.  */
5068 
5069   apply_change_group ();
5070 
5071   /* Set sets[i].src_elt to the class each source belongs to.
5072      Detect assignments from or to volatile things
5073      and set set[i] to zero so they will be ignored
5074      in the rest of this function.
5075 
5076      Nothing in this loop changes the hash table or the register chains.  */
5077 
5078   for (i = 0; i < n_sets; i++)
5079     {
5080       rtx src, dest;
5081       rtx src_folded;
5082       struct table_elt *elt = 0, *p;
5083       enum machine_mode mode;
5084       rtx src_eqv_here;
5085       rtx src_const = 0;
5086       rtx src_related = 0;
5087       struct table_elt *src_const_elt = 0;
5088       int src_cost = MAX_COST;
5089       int src_eqv_cost = MAX_COST;
5090       int src_folded_cost = MAX_COST;
5091       int src_related_cost = MAX_COST;
5092       int src_elt_cost = MAX_COST;
5093       int src_regcost = MAX_COST;
5094       int src_eqv_regcost = MAX_COST;
5095       int src_folded_regcost = MAX_COST;
5096       int src_related_regcost = MAX_COST;
5097       int src_elt_regcost = MAX_COST;
5098       /* Set nonzero if we need to call force_const_mem on with the
5099 	 contents of src_folded before using it.  */
5100       int src_folded_force_flag = 0;
5101 
5102       dest = SET_DEST (sets[i].rtl);
5103       src = SET_SRC (sets[i].rtl);
5104 
5105       /* If SRC is a constant that has no machine mode,
5106 	 hash it with the destination's machine mode.
5107 	 This way we can keep different modes separate.  */
5108 
5109       mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5110       sets[i].mode = mode;
5111 
5112       if (src_eqv)
5113 	{
5114 	  enum machine_mode eqvmode = mode;
5115 	  if (GET_CODE (dest) == STRICT_LOW_PART)
5116 	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5117 	  do_not_record = 0;
5118 	  hash_arg_in_memory = 0;
5119 	  src_eqv_hash = HASH (src_eqv, eqvmode);
5120 
5121 	  /* Find the equivalence class for the equivalent expression.  */
5122 
5123 	  if (!do_not_record)
5124 	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5125 
5126 	  src_eqv_volatile = do_not_record;
5127 	  src_eqv_in_memory = hash_arg_in_memory;
5128 	}
5129 
5130       /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5131 	 value of the INNER register, not the destination.  So it is not
5132 	 a valid substitution for the source.  But save it for later.  */
5133       if (GET_CODE (dest) == STRICT_LOW_PART)
5134 	src_eqv_here = 0;
5135       else
5136 	src_eqv_here = src_eqv;
5137 
5138       /* Simplify and foldable subexpressions in SRC.  Then get the fully-
5139 	 simplified result, which may not necessarily be valid.  */
5140       src_folded = fold_rtx (src, insn);
5141 
5142 #if 0
5143       /* ??? This caused bad code to be generated for the m68k port with -O2.
5144 	 Suppose src is (CONST_INT -1), and that after truncation src_folded
5145 	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
5146 	 At the end we will add src and src_const to the same equivalence
5147 	 class.  We now have 3 and -1 on the same equivalence class.  This
5148 	 causes later instructions to be mis-optimized.  */
5149       /* If storing a constant in a bitfield, pre-truncate the constant
5150 	 so we will be able to record it later.  */
5151       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5152 	{
5153 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5154 
5155 	  if (GET_CODE (src) == CONST_INT
5156 	      && GET_CODE (width) == CONST_INT
5157 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5158 	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5159 	    src_folded
5160 	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5161 					  << INTVAL (width)) - 1));
5162 	}
5163 #endif
5164 
5165       /* Compute SRC's hash code, and also notice if it
5166 	 should not be recorded at all.  In that case,
5167 	 prevent any further processing of this assignment.  */
5168       do_not_record = 0;
5169       hash_arg_in_memory = 0;
5170 
5171       sets[i].src = src;
5172       sets[i].src_hash = HASH (src, mode);
5173       sets[i].src_volatile = do_not_record;
5174       sets[i].src_in_memory = hash_arg_in_memory;
5175 
5176       /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5177 	 a pseudo, do not record SRC.  Using SRC as a replacement for
5178 	 anything else will be incorrect in that situation.  Note that
5179 	 this usually occurs only for stack slots, in which case all the
5180 	 RTL would be referring to SRC, so we don't lose any optimization
5181 	 opportunities by not having SRC in the hash table.  */
5182 
5183       if (MEM_P (src)
5184 	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5185 	  && REG_P (dest)
5186 	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5187 	sets[i].src_volatile = 1;
5188 
5189 #if 0
5190       /* It is no longer clear why we used to do this, but it doesn't
5191 	 appear to still be needed.  So let's try without it since this
5192 	 code hurts cse'ing widened ops.  */
5193       /* If source is a paradoxical subreg (such as QI treated as an SI),
5194 	 treat it as volatile.  It may do the work of an SI in one context
5195 	 where the extra bits are not being used, but cannot replace an SI
5196 	 in general.  */
5197       if (GET_CODE (src) == SUBREG
5198 	  && (GET_MODE_SIZE (GET_MODE (src))
5199 	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5200 	sets[i].src_volatile = 1;
5201 #endif
5202 
5203       /* Locate all possible equivalent forms for SRC.  Try to replace
5204          SRC in the insn with each cheaper equivalent.
5205 
5206          We have the following types of equivalents: SRC itself, a folded
5207          version, a value given in a REG_EQUAL note, or a value related
5208 	 to a constant.
5209 
5210          Each of these equivalents may be part of an additional class
5211          of equivalents (if more than one is in the table, they must be in
5212          the same class; we check for this).
5213 
5214 	 If the source is volatile, we don't do any table lookups.
5215 
5216          We note any constant equivalent for possible later use in a
5217          REG_NOTE.  */
5218 
5219       if (!sets[i].src_volatile)
5220 	elt = lookup (src, sets[i].src_hash, mode);
5221 
5222       sets[i].src_elt = elt;
5223 
5224       if (elt && src_eqv_here && src_eqv_elt)
5225 	{
5226 	  if (elt->first_same_value != src_eqv_elt->first_same_value)
5227 	    {
5228 	      /* The REG_EQUAL is indicating that two formerly distinct
5229 		 classes are now equivalent.  So merge them.  */
5230 	      merge_equiv_classes (elt, src_eqv_elt);
5231 	      src_eqv_hash = HASH (src_eqv, elt->mode);
5232 	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5233 	    }
5234 
5235 	  src_eqv_here = 0;
5236 	}
5237 
5238       else if (src_eqv_elt)
5239 	elt = src_eqv_elt;
5240 
5241       /* Try to find a constant somewhere and record it in `src_const'.
5242 	 Record its table element, if any, in `src_const_elt'.  Look in
5243 	 any known equivalences first.  (If the constant is not in the
5244 	 table, also set `sets[i].src_const_hash').  */
5245       if (elt)
5246 	for (p = elt->first_same_value; p; p = p->next_same_value)
5247 	  if (p->is_const)
5248 	    {
5249 	      src_const = p->exp;
5250 	      src_const_elt = elt;
5251 	      break;
5252 	    }
5253 
5254       if (src_const == 0
5255 	  && (CONSTANT_P (src_folded)
5256 	      /* Consider (minus (label_ref L1) (label_ref L2)) as
5257 		 "constant" here so we will record it. This allows us
5258 		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5259 	      || (GET_CODE (src_folded) == MINUS
5260 		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5261 		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5262 	src_const = src_folded, src_const_elt = elt;
5263       else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5264 	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5265 
5266       /* If we don't know if the constant is in the table, get its
5267 	 hash code and look it up.  */
5268       if (src_const && src_const_elt == 0)
5269 	{
5270 	  sets[i].src_const_hash = HASH (src_const, mode);
5271 	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5272 	}
5273 
5274       sets[i].src_const = src_const;
5275       sets[i].src_const_elt = src_const_elt;
5276 
5277       /* If the constant and our source are both in the table, mark them as
5278 	 equivalent.  Otherwise, if a constant is in the table but the source
5279 	 isn't, set ELT to it.  */
5280       if (src_const_elt && elt
5281 	  && src_const_elt->first_same_value != elt->first_same_value)
5282 	merge_equiv_classes (elt, src_const_elt);
5283       else if (src_const_elt && elt == 0)
5284 	elt = src_const_elt;
5285 
5286       /* See if there is a register linearly related to a constant
5287          equivalent of SRC.  */
5288       if (src_const
5289 	  && (GET_CODE (src_const) == CONST
5290 	      || (src_const_elt && src_const_elt->related_value != 0)))
5291 	{
5292 	  src_related = use_related_value (src_const, src_const_elt);
5293 	  if (src_related)
5294 	    {
5295 	      struct table_elt *src_related_elt
5296 		= lookup (src_related, HASH (src_related, mode), mode);
5297 	      if (src_related_elt && elt)
5298 		{
5299 		  if (elt->first_same_value
5300 		      != src_related_elt->first_same_value)
5301 		    /* This can occur when we previously saw a CONST
5302 		       involving a SYMBOL_REF and then see the SYMBOL_REF
5303 		       twice.  Merge the involved classes.  */
5304 		    merge_equiv_classes (elt, src_related_elt);
5305 
5306 		  src_related = 0;
5307 		  src_related_elt = 0;
5308 		}
5309 	      else if (src_related_elt && elt == 0)
5310 		elt = src_related_elt;
5311 	    }
5312 	}
5313 
5314       /* See if we have a CONST_INT that is already in a register in a
5315 	 wider mode.  */
5316 
5317       if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5318 	  && GET_MODE_CLASS (mode) == MODE_INT
5319 	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5320 	{
5321 	  enum machine_mode wider_mode;
5322 
5323 	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
5324 	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5325 	       && src_related == 0;
5326 	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5327 	    {
5328 	      struct table_elt *const_elt
5329 		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5330 
5331 	      if (const_elt == 0)
5332 		continue;
5333 
5334 	      for (const_elt = const_elt->first_same_value;
5335 		   const_elt; const_elt = const_elt->next_same_value)
5336 		if (REG_P (const_elt->exp))
5337 		  {
5338 		    src_related = gen_lowpart (mode,
5339 							   const_elt->exp);
5340 		    break;
5341 		  }
5342 	    }
5343 	}
5344 
5345       /* Another possibility is that we have an AND with a constant in
5346 	 a mode narrower than a word.  If so, it might have been generated
5347 	 as part of an "if" which would narrow the AND.  If we already
5348 	 have done the AND in a wider mode, we can use a SUBREG of that
5349 	 value.  */
5350 
5351       if (flag_expensive_optimizations && ! src_related
5352 	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5353 	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5354 	{
5355 	  enum machine_mode tmode;
5356 	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5357 
5358 	  for (tmode = GET_MODE_WIDER_MODE (mode);
5359 	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5360 	       tmode = GET_MODE_WIDER_MODE (tmode))
5361 	    {
5362 	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5363 	      struct table_elt *larger_elt;
5364 
5365 	      if (inner)
5366 		{
5367 		  PUT_MODE (new_and, tmode);
5368 		  XEXP (new_and, 0) = inner;
5369 		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5370 		  if (larger_elt == 0)
5371 		    continue;
5372 
5373 		  for (larger_elt = larger_elt->first_same_value;
5374 		       larger_elt; larger_elt = larger_elt->next_same_value)
5375 		    if (REG_P (larger_elt->exp))
5376 		      {
5377 			src_related
5378 			  = gen_lowpart (mode, larger_elt->exp);
5379 			break;
5380 		      }
5381 
5382 		  if (src_related)
5383 		    break;
5384 		}
5385 	    }
5386 	}
5387 
5388 #ifdef LOAD_EXTEND_OP
5389       /* See if a MEM has already been loaded with a widening operation;
5390 	 if it has, we can use a subreg of that.  Many CISC machines
5391 	 also have such operations, but this is only likely to be
5392 	 beneficial on these machines.  */
5393 
5394       if (flag_expensive_optimizations && src_related == 0
5395 	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5396 	  && GET_MODE_CLASS (mode) == MODE_INT
5397 	  && MEM_P (src) && ! do_not_record
5398 	  && LOAD_EXTEND_OP (mode) != UNKNOWN)
5399 	{
5400 	  struct rtx_def memory_extend_buf;
5401 	  rtx memory_extend_rtx = &memory_extend_buf;
5402 	  enum machine_mode tmode;
5403 
5404 	  /* Set what we are trying to extend and the operation it might
5405 	     have been extended with.  */
5406 	  memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5407 	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5408 	  XEXP (memory_extend_rtx, 0) = src;
5409 
5410 	  for (tmode = GET_MODE_WIDER_MODE (mode);
5411 	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5412 	       tmode = GET_MODE_WIDER_MODE (tmode))
5413 	    {
5414 	      struct table_elt *larger_elt;
5415 
5416 	      PUT_MODE (memory_extend_rtx, tmode);
5417 	      larger_elt = lookup (memory_extend_rtx,
5418 				   HASH (memory_extend_rtx, tmode), tmode);
5419 	      if (larger_elt == 0)
5420 		continue;
5421 
5422 	      for (larger_elt = larger_elt->first_same_value;
5423 		   larger_elt; larger_elt = larger_elt->next_same_value)
5424 		if (REG_P (larger_elt->exp))
5425 		  {
5426 		    src_related = gen_lowpart (mode,
5427 							   larger_elt->exp);
5428 		    break;
5429 		  }
5430 
5431 	      if (src_related)
5432 		break;
5433 	    }
5434 	}
5435 #endif /* LOAD_EXTEND_OP */
5436 
5437       if (src == src_folded)
5438 	src_folded = 0;
5439 
5440       /* At this point, ELT, if nonzero, points to a class of expressions
5441          equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5442 	 and SRC_RELATED, if nonzero, each contain additional equivalent
5443 	 expressions.  Prune these latter expressions by deleting expressions
5444 	 already in the equivalence class.
5445 
5446 	 Check for an equivalent identical to the destination.  If found,
5447 	 this is the preferred equivalent since it will likely lead to
5448 	 elimination of the insn.  Indicate this by placing it in
5449 	 `src_related'.  */
5450 
5451       if (elt)
5452 	elt = elt->first_same_value;
5453       for (p = elt; p; p = p->next_same_value)
5454 	{
5455 	  enum rtx_code code = GET_CODE (p->exp);
5456 
5457 	  /* If the expression is not valid, ignore it.  Then we do not
5458 	     have to check for validity below.  In most cases, we can use
5459 	     `rtx_equal_p', since canonicalization has already been done.  */
5460 	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5461 	    continue;
5462 
5463 	  /* Also skip paradoxical subregs, unless that's what we're
5464 	     looking for.  */
5465 	  if (code == SUBREG
5466 	      && (GET_MODE_SIZE (GET_MODE (p->exp))
5467 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5468 	      && ! (src != 0
5469 		    && GET_CODE (src) == SUBREG
5470 		    && GET_MODE (src) == GET_MODE (p->exp)
5471 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5472 			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5473 	    continue;
5474 
5475 	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5476 	    src = 0;
5477 	  else if (src_folded && GET_CODE (src_folded) == code
5478 		   && rtx_equal_p (src_folded, p->exp))
5479 	    src_folded = 0;
5480 	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5481 		   && rtx_equal_p (src_eqv_here, p->exp))
5482 	    src_eqv_here = 0;
5483 	  else if (src_related && GET_CODE (src_related) == code
5484 		   && rtx_equal_p (src_related, p->exp))
5485 	    src_related = 0;
5486 
5487 	  /* This is the same as the destination of the insns, we want
5488 	     to prefer it.  Copy it to src_related.  The code below will
5489 	     then give it a negative cost.  */
5490 	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5491 	    src_related = dest;
5492 	}
5493 
5494       /* Find the cheapest valid equivalent, trying all the available
5495          possibilities.  Prefer items not in the hash table to ones
5496          that are when they are equal cost.  Note that we can never
5497          worsen an insn as the current contents will also succeed.
5498 	 If we find an equivalent identical to the destination, use it as best,
5499 	 since this insn will probably be eliminated in that case.  */
5500       if (src)
5501 	{
5502 	  if (rtx_equal_p (src, dest))
5503 	    src_cost = src_regcost = -1;
5504 	  else
5505 	    {
5506 	      src_cost = COST (src);
5507 	      src_regcost = approx_reg_cost (src);
5508 	    }
5509 	}
5510 
5511       if (src_eqv_here)
5512 	{
5513 	  if (rtx_equal_p (src_eqv_here, dest))
5514 	    src_eqv_cost = src_eqv_regcost = -1;
5515 	  else
5516 	    {
5517 	      src_eqv_cost = COST (src_eqv_here);
5518 	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5519 	    }
5520 	}
5521 
5522       if (src_folded)
5523 	{
5524 	  if (rtx_equal_p (src_folded, dest))
5525 	    src_folded_cost = src_folded_regcost = -1;
5526 	  else
5527 	    {
5528 	      src_folded_cost = COST (src_folded);
5529 	      src_folded_regcost = approx_reg_cost (src_folded);
5530 	    }
5531 	}
5532 
5533       if (src_related)
5534 	{
5535 	  if (rtx_equal_p (src_related, dest))
5536 	    src_related_cost = src_related_regcost = -1;
5537 	  else
5538 	    {
5539 	      src_related_cost = COST (src_related);
5540 	      src_related_regcost = approx_reg_cost (src_related);
5541 	    }
5542 	}
5543 
5544       /* If this was an indirect jump insn, a known label will really be
5545 	 cheaper even though it looks more expensive.  */
5546       if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5547 	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5548 
5549       /* Terminate loop when replacement made.  This must terminate since
5550          the current contents will be tested and will always be valid.  */
5551       while (1)
5552 	{
5553 	  rtx trial;
5554 
5555 	  /* Skip invalid entries.  */
5556 	  while (elt && !REG_P (elt->exp)
5557 		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5558 	    elt = elt->next_same_value;
5559 
5560 	  /* A paradoxical subreg would be bad here: it'll be the right
5561 	     size, but later may be adjusted so that the upper bits aren't
5562 	     what we want.  So reject it.  */
5563 	  if (elt != 0
5564 	      && GET_CODE (elt->exp) == SUBREG
5565 	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
5566 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5567 	      /* It is okay, though, if the rtx we're trying to match
5568 		 will ignore any of the bits we can't predict.  */
5569 	      && ! (src != 0
5570 		    && GET_CODE (src) == SUBREG
5571 		    && GET_MODE (src) == GET_MODE (elt->exp)
5572 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5573 			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5574 	    {
5575 	      elt = elt->next_same_value;
5576 	      continue;
5577 	    }
5578 
5579 	  if (elt)
5580 	    {
5581 	      src_elt_cost = elt->cost;
5582 	      src_elt_regcost = elt->regcost;
5583 	    }
5584 
5585 	  /* Find cheapest and skip it for the next time.   For items
5586 	     of equal cost, use this order:
5587 	     src_folded, src, src_eqv, src_related and hash table entry.  */
5588 	  if (src_folded
5589 	      && preferable (src_folded_cost, src_folded_regcost,
5590 			     src_cost, src_regcost) <= 0
5591 	      && preferable (src_folded_cost, src_folded_regcost,
5592 			     src_eqv_cost, src_eqv_regcost) <= 0
5593 	      && preferable (src_folded_cost, src_folded_regcost,
5594 			     src_related_cost, src_related_regcost) <= 0
5595 	      && preferable (src_folded_cost, src_folded_regcost,
5596 			     src_elt_cost, src_elt_regcost) <= 0)
5597 	    {
5598 	      trial = src_folded, src_folded_cost = MAX_COST;
5599 	      if (src_folded_force_flag)
5600 		{
5601 		  rtx forced = force_const_mem (mode, trial);
5602 		  if (forced)
5603 		    trial = forced;
5604 		}
5605 	    }
5606 	  else if (src
5607 		   && preferable (src_cost, src_regcost,
5608 				  src_eqv_cost, src_eqv_regcost) <= 0
5609 		   && preferable (src_cost, src_regcost,
5610 				  src_related_cost, src_related_regcost) <= 0
5611 		   && preferable (src_cost, src_regcost,
5612 				  src_elt_cost, src_elt_regcost) <= 0)
5613 	    trial = src, src_cost = MAX_COST;
5614 	  else if (src_eqv_here
5615 		   && preferable (src_eqv_cost, src_eqv_regcost,
5616 				  src_related_cost, src_related_regcost) <= 0
5617 		   && preferable (src_eqv_cost, src_eqv_regcost,
5618 				  src_elt_cost, src_elt_regcost) <= 0)
5619 	    trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5620 	  else if (src_related
5621 		   && preferable (src_related_cost, src_related_regcost,
5622 				  src_elt_cost, src_elt_regcost) <= 0)
5623 	    trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5624 	  else
5625 	    {
5626 	      trial = copy_rtx (elt->exp);
5627 	      elt = elt->next_same_value;
5628 	      src_elt_cost = MAX_COST;
5629 	    }
5630 
5631 	  /* We don't normally have an insn matching (set (pc) (pc)), so
5632 	     check for this separately here.  We will delete such an
5633 	     insn below.
5634 
5635 	     For other cases such as a table jump or conditional jump
5636 	     where we know the ultimate target, go ahead and replace the
5637 	     operand.  While that may not make a valid insn, we will
5638 	     reemit the jump below (and also insert any necessary
5639 	     barriers).  */
5640 	  if (n_sets == 1 && dest == pc_rtx
5641 	      && (trial == pc_rtx
5642 		  || (GET_CODE (trial) == LABEL_REF
5643 		      && ! condjump_p (insn))))
5644 	    {
5645 	      /* Don't substitute non-local labels, this confuses CFG.  */
5646 	      if (GET_CODE (trial) == LABEL_REF
5647 		  && LABEL_REF_NONLOCAL_P (trial))
5648 		continue;
5649 
5650 	      SET_SRC (sets[i].rtl) = trial;
5651 	      cse_jumps_altered = 1;
5652 	      break;
5653 	    }
5654 
5655 	  /* Reject certain invalid forms of CONST that we create.  */
5656 	  else if (CONSTANT_P (trial)
5657 		   && GET_CODE (trial) == CONST
5658 		   /* Reject cases that will cause decode_rtx_const to
5659 		      die.  On the alpha when simplifying a switch, we
5660 		      get (const (truncate (minus (label_ref)
5661 		      (label_ref)))).  */
5662 		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5663 		       /* Likewise on IA-64, except without the
5664 			  truncate.  */
5665 		       || (GET_CODE (XEXP (trial, 0)) == MINUS
5666 			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5667 			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5668 	    /* Do nothing for this case.  */
5669 	    ;
5670 
5671 	  /* Look for a substitution that makes a valid insn.  */
5672 	  else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5673 	    {
5674 	      rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5675 
5676 	      /* If we just made a substitution inside a libcall, then we
5677 		 need to make the same substitution in any notes attached
5678 		 to the RETVAL insn.  */
5679 	      if (libcall_insn
5680 		  && (REG_P (sets[i].orig_src)
5681 		      || GET_CODE (sets[i].orig_src) == SUBREG
5682 		      || MEM_P (sets[i].orig_src)))
5683 		{
5684 	          rtx note = find_reg_equal_equiv_note (libcall_insn);
5685 		  if (note != 0)
5686 		    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5687 							   sets[i].orig_src,
5688 							   copy_rtx (new));
5689 		}
5690 
5691 	      /* The result of apply_change_group can be ignored; see
5692 		 canon_reg.  */
5693 
5694 	      validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5695 	      apply_change_group ();
5696 	      break;
5697 	    }
5698 
5699 	  /* If we previously found constant pool entries for
5700 	     constants and this is a constant, try making a
5701 	     pool entry.  Put it in src_folded unless we already have done
5702 	     this since that is where it likely came from.  */
5703 
5704 	  else if (constant_pool_entries_cost
5705 		   && CONSTANT_P (trial)
5706 		   && (src_folded == 0
5707 		       || (!MEM_P (src_folded)
5708 			   && ! src_folded_force_flag))
5709 		   && GET_MODE_CLASS (mode) != MODE_CC
5710 		   && mode != VOIDmode)
5711 	    {
5712 	      src_folded_force_flag = 1;
5713 	      src_folded = trial;
5714 	      src_folded_cost = constant_pool_entries_cost;
5715 	      src_folded_regcost = constant_pool_entries_regcost;
5716 	    }
5717 	}
5718 
5719       src = SET_SRC (sets[i].rtl);
5720 
5721       /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5722 	 However, there is an important exception:  If both are registers
5723 	 that are not the head of their equivalence class, replace SET_SRC
5724 	 with the head of the class.  If we do not do this, we will have
5725 	 both registers live over a portion of the basic block.  This way,
5726 	 their lifetimes will likely abut instead of overlapping.  */
5727       if (REG_P (dest)
5728 	  && REGNO_QTY_VALID_P (REGNO (dest)))
5729 	{
5730 	  int dest_q = REG_QTY (REGNO (dest));
5731 	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5732 
5733 	  if (dest_ent->mode == GET_MODE (dest)
5734 	      && dest_ent->first_reg != REGNO (dest)
5735 	      && REG_P (src) && REGNO (src) == REGNO (dest)
5736 	      /* Don't do this if the original insn had a hard reg as
5737 		 SET_SRC or SET_DEST.  */
5738 	      && (!REG_P (sets[i].src)
5739 		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5740 	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5741 	    /* We can't call canon_reg here because it won't do anything if
5742 	       SRC is a hard register.  */
5743 	    {
5744 	      int src_q = REG_QTY (REGNO (src));
5745 	      struct qty_table_elem *src_ent = &qty_table[src_q];
5746 	      int first = src_ent->first_reg;
5747 	      rtx new_src
5748 		= (first >= FIRST_PSEUDO_REGISTER
5749 		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5750 
5751 	      /* We must use validate-change even for this, because this
5752 		 might be a special no-op instruction, suitable only to
5753 		 tag notes onto.  */
5754 	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5755 		{
5756 		  src = new_src;
5757 		  /* If we had a constant that is cheaper than what we are now
5758 		     setting SRC to, use that constant.  We ignored it when we
5759 		     thought we could make this into a no-op.  */
5760 		  if (src_const && COST (src_const) < COST (src)
5761 		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5762 					  src_const, 0))
5763 		    src = src_const;
5764 		}
5765 	    }
5766 	}
5767 
5768       /* If we made a change, recompute SRC values.  */
5769       if (src != sets[i].src)
5770 	{
5771 	  cse_altered = 1;
5772 	  do_not_record = 0;
5773 	  hash_arg_in_memory = 0;
5774 	  sets[i].src = src;
5775 	  sets[i].src_hash = HASH (src, mode);
5776 	  sets[i].src_volatile = do_not_record;
5777 	  sets[i].src_in_memory = hash_arg_in_memory;
5778 	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5779 	}
5780 
5781       /* If this is a single SET, we are setting a register, and we have an
5782 	 equivalent constant, we want to add a REG_NOTE.   We don't want
5783 	 to write a REG_EQUAL note for a constant pseudo since verifying that
5784 	 that pseudo hasn't been eliminated is a pain.  Such a note also
5785 	 won't help anything.
5786 
5787 	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5788 	 which can be created for a reference to a compile time computable
5789 	 entry in a jump table.  */
5790 
5791       if (n_sets == 1 && src_const && REG_P (dest)
5792 	  && !REG_P (src_const)
5793 	  && ! (GET_CODE (src_const) == CONST
5794 		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5795 		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5796 		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5797 	{
5798 	  /* We only want a REG_EQUAL note if src_const != src.  */
5799 	  if (! rtx_equal_p (src, src_const))
5800 	    {
5801 	      /* Make sure that the rtx is not shared.  */
5802 	      src_const = copy_rtx (src_const);
5803 
5804 	      /* Record the actual constant value in a REG_EQUAL note,
5805 		 making a new one if one does not already exist.  */
5806 	      set_unique_reg_note (insn, REG_EQUAL, src_const);
5807 	    }
5808 	}
5809 
5810       /* Now deal with the destination.  */
5811       do_not_record = 0;
5812 
5813       /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
5814       while (GET_CODE (dest) == SUBREG
5815 	     || GET_CODE (dest) == ZERO_EXTRACT
5816 	     || GET_CODE (dest) == STRICT_LOW_PART)
5817 	dest = XEXP (dest, 0);
5818 
5819       sets[i].inner_dest = dest;
5820 
5821       if (MEM_P (dest))
5822 	{
5823 #ifdef PUSH_ROUNDING
5824 	  /* Stack pushes invalidate the stack pointer.  */
5825 	  rtx addr = XEXP (dest, 0);
5826 	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5827 	      && XEXP (addr, 0) == stack_pointer_rtx)
5828 	    invalidate (stack_pointer_rtx, VOIDmode);
5829 #endif
5830 	  dest = fold_rtx (dest, insn);
5831 	}
5832 
5833       /* Compute the hash code of the destination now,
5834 	 before the effects of this instruction are recorded,
5835 	 since the register values used in the address computation
5836 	 are those before this instruction.  */
5837       sets[i].dest_hash = HASH (dest, mode);
5838 
5839       /* Don't enter a bit-field in the hash table
5840 	 because the value in it after the store
5841 	 may not equal what was stored, due to truncation.  */
5842 
5843       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5844 	{
5845 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5846 
5847 	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5848 	      && GET_CODE (width) == CONST_INT
5849 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5850 	      && ! (INTVAL (src_const)
5851 		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5852 	    /* Exception: if the value is constant,
5853 	       and it won't be truncated, record it.  */
5854 	    ;
5855 	  else
5856 	    {
5857 	      /* This is chosen so that the destination will be invalidated
5858 		 but no new value will be recorded.
5859 		 We must invalidate because sometimes constant
5860 		 values can be recorded for bitfields.  */
5861 	      sets[i].src_elt = 0;
5862 	      sets[i].src_volatile = 1;
5863 	      src_eqv = 0;
5864 	      src_eqv_elt = 0;
5865 	    }
5866 	}
5867 
5868       /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5869 	 the insn.  */
5870       else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5871 	{
5872 	  /* One less use of the label this insn used to jump to.  */
5873 	  delete_insn (insn);
5874 	  cse_jumps_altered = 1;
5875 	  /* No more processing for this set.  */
5876 	  sets[i].rtl = 0;
5877 	}
5878 
5879       /* If this SET is now setting PC to a label, we know it used to
5880 	 be a conditional or computed branch.  */
5881       else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5882 	       && !LABEL_REF_NONLOCAL_P (src))
5883 	{
5884 	  /* Now emit a BARRIER after the unconditional jump.  */
5885 	  if (NEXT_INSN (insn) == 0
5886 	      || !BARRIER_P (NEXT_INSN (insn)))
5887 	    emit_barrier_after (insn);
5888 
5889 	  /* We reemit the jump in as many cases as possible just in
5890 	     case the form of an unconditional jump is significantly
5891 	     different than a computed jump or conditional jump.
5892 
5893 	     If this insn has multiple sets, then reemitting the
5894 	     jump is nontrivial.  So instead we just force rerecognition
5895 	     and hope for the best.  */
5896 	  if (n_sets == 1)
5897 	    {
5898 	      rtx new, note;
5899 
5900 	      new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5901 	      JUMP_LABEL (new) = XEXP (src, 0);
5902 	      LABEL_NUSES (XEXP (src, 0))++;
5903 
5904 	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5905 	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5906 	      if (note)
5907 		{
5908 		  XEXP (note, 1) = NULL_RTX;
5909 		  REG_NOTES (new) = note;
5910 		}
5911 
5912 	      delete_insn (insn);
5913 	      insn = new;
5914 
5915 	      /* Now emit a BARRIER after the unconditional jump.  */
5916 	      if (NEXT_INSN (insn) == 0
5917 		  || !BARRIER_P (NEXT_INSN (insn)))
5918 		emit_barrier_after (insn);
5919 	    }
5920 	  else
5921 	    INSN_CODE (insn) = -1;
5922 
5923 	  /* Do not bother deleting any unreachable code,
5924 	     let jump/flow do that.  */
5925 
5926 	  cse_jumps_altered = 1;
5927 	  sets[i].rtl = 0;
5928 	}
5929 
5930       /* If destination is volatile, invalidate it and then do no further
5931 	 processing for this assignment.  */
5932 
5933       else if (do_not_record)
5934 	{
5935 	  if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5936 	    invalidate (dest, VOIDmode);
5937 	  else if (MEM_P (dest))
5938 	    invalidate (dest, VOIDmode);
5939 	  else if (GET_CODE (dest) == STRICT_LOW_PART
5940 		   || GET_CODE (dest) == ZERO_EXTRACT)
5941 	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5942 	  sets[i].rtl = 0;
5943 	}
5944 
5945       if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5946 	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5947 
5948 #ifdef HAVE_cc0
5949       /* If setting CC0, record what it was set to, or a constant, if it
5950 	 is equivalent to a constant.  If it is being set to a floating-point
5951 	 value, make a COMPARE with the appropriate constant of 0.  If we
5952 	 don't do this, later code can interpret this as a test against
5953 	 const0_rtx, which can cause problems if we try to put it into an
5954 	 insn as a floating-point operand.  */
5955       if (dest == cc0_rtx)
5956 	{
5957 	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5958 	  this_insn_cc0_mode = mode;
5959 	  if (FLOAT_MODE_P (mode))
5960 	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5961 					     CONST0_RTX (mode));
5962 	}
5963 #endif
5964     }
5965 
5966   /* Now enter all non-volatile source expressions in the hash table
5967      if they are not already present.
5968      Record their equivalence classes in src_elt.
5969      This way we can insert the corresponding destinations into
5970      the same classes even if the actual sources are no longer in them
5971      (having been invalidated).  */
5972 
5973   if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5974       && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5975     {
5976       struct table_elt *elt;
5977       struct table_elt *classp = sets[0].src_elt;
5978       rtx dest = SET_DEST (sets[0].rtl);
5979       enum machine_mode eqvmode = GET_MODE (dest);
5980 
5981       if (GET_CODE (dest) == STRICT_LOW_PART)
5982 	{
5983 	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5984 	  classp = 0;
5985 	}
5986       if (insert_regs (src_eqv, classp, 0))
5987 	{
5988 	  rehash_using_reg (src_eqv);
5989 	  src_eqv_hash = HASH (src_eqv, eqvmode);
5990 	}
5991       elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5992       elt->in_memory = src_eqv_in_memory;
5993       src_eqv_elt = elt;
5994 
5995       /* Check to see if src_eqv_elt is the same as a set source which
5996 	 does not yet have an elt, and if so set the elt of the set source
5997 	 to src_eqv_elt.  */
5998       for (i = 0; i < n_sets; i++)
5999 	if (sets[i].rtl && sets[i].src_elt == 0
6000 	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
6001 	  sets[i].src_elt = src_eqv_elt;
6002     }
6003 
6004   for (i = 0; i < n_sets; i++)
6005     if (sets[i].rtl && ! sets[i].src_volatile
6006 	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
6007       {
6008 	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
6009 	  {
6010 	    /* REG_EQUAL in setting a STRICT_LOW_PART
6011 	       gives an equivalent for the entire destination register,
6012 	       not just for the subreg being stored in now.
6013 	       This is a more interesting equivalence, so we arrange later
6014 	       to treat the entire reg as the destination.  */
6015 	    sets[i].src_elt = src_eqv_elt;
6016 	    sets[i].src_hash = src_eqv_hash;
6017 	  }
6018 	else
6019 	  {
6020 	    /* Insert source and constant equivalent into hash table, if not
6021 	       already present.  */
6022 	    struct table_elt *classp = src_eqv_elt;
6023 	    rtx src = sets[i].src;
6024 	    rtx dest = SET_DEST (sets[i].rtl);
6025 	    enum machine_mode mode
6026 	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6027 
6028 	    /* It's possible that we have a source value known to be
6029 	       constant but don't have a REG_EQUAL note on the insn.
6030 	       Lack of a note will mean src_eqv_elt will be NULL.  This
6031 	       can happen where we've generated a SUBREG to access a
6032 	       CONST_INT that is already in a register in a wider mode.
6033 	       Ensure that the source expression is put in the proper
6034 	       constant class.  */
6035 	    if (!classp)
6036 	      classp = sets[i].src_const_elt;
6037 
6038 	    if (sets[i].src_elt == 0)
6039 	      {
6040 		/* Don't put a hard register source into the table if this is
6041 		   the last insn of a libcall.  In this case, we only need
6042 		   to put src_eqv_elt in src_elt.  */
6043 		if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6044 		  {
6045 		    struct table_elt *elt;
6046 
6047 		    /* Note that these insert_regs calls cannot remove
6048 		       any of the src_elt's, because they would have failed to
6049 		       match if not still valid.  */
6050 		    if (insert_regs (src, classp, 0))
6051 		      {
6052 			rehash_using_reg (src);
6053 			sets[i].src_hash = HASH (src, mode);
6054 		      }
6055 		    elt = insert (src, classp, sets[i].src_hash, mode);
6056 		    elt->in_memory = sets[i].src_in_memory;
6057 		    sets[i].src_elt = classp = elt;
6058 		  }
6059 		else
6060 		  sets[i].src_elt = classp;
6061 	      }
6062 	    if (sets[i].src_const && sets[i].src_const_elt == 0
6063 		&& src != sets[i].src_const
6064 		&& ! rtx_equal_p (sets[i].src_const, src))
6065 	      sets[i].src_elt = insert (sets[i].src_const, classp,
6066 					sets[i].src_const_hash, mode);
6067 	  }
6068       }
6069     else if (sets[i].src_elt == 0)
6070       /* If we did not insert the source into the hash table (e.g., it was
6071 	 volatile), note the equivalence class for the REG_EQUAL value, if any,
6072 	 so that the destination goes into that class.  */
6073       sets[i].src_elt = src_eqv_elt;
6074 
6075   /* Record destination addresses in the hash table.  This allows us to
6076      check if they are invalidated by other sets.  */
6077   for (i = 0; i < n_sets; i++)
6078     {
6079       if (sets[i].rtl)
6080 	{
6081 	  rtx x = sets[i].inner_dest;
6082 	  struct table_elt *elt;
6083 	  enum machine_mode mode;
6084 	  unsigned hash;
6085 
6086 	  if (MEM_P (x))
6087 	    {
6088 	      x = XEXP (x, 0);
6089 	      mode = GET_MODE (x);
6090 	      hash = HASH (x, mode);
6091 	      elt = lookup (x, hash, mode);
6092 	      if (!elt)
6093 		{
6094 		  if (insert_regs (x, NULL, 0))
6095 		    {
6096 		      rtx dest = SET_DEST (sets[i].rtl);
6097 
6098 		      rehash_using_reg (x);
6099 		      hash = HASH (x, mode);
6100 		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6101 		    }
6102 		  elt = insert (x, NULL, hash, mode);
6103 		}
6104 
6105 	      sets[i].dest_addr_elt = elt;
6106 	    }
6107 	  else
6108 	    sets[i].dest_addr_elt = NULL;
6109 	}
6110     }
6111 
6112   invalidate_from_clobbers (x);
6113 
6114   /* Some registers are invalidated by subroutine calls.  Memory is
6115      invalidated by non-constant calls.  */
6116 
6117   if (CALL_P (insn))
6118     {
6119       if (! CONST_OR_PURE_CALL_P (insn))
6120 	invalidate_memory ();
6121       invalidate_for_call ();
6122     }
6123 
6124   /* Now invalidate everything set by this instruction.
6125      If a SUBREG or other funny destination is being set,
6126      sets[i].rtl is still nonzero, so here we invalidate the reg
6127      a part of which is being set.  */
6128 
6129   for (i = 0; i < n_sets; i++)
6130     if (sets[i].rtl)
6131       {
6132 	/* We can't use the inner dest, because the mode associated with
6133 	   a ZERO_EXTRACT is significant.  */
6134 	rtx dest = SET_DEST (sets[i].rtl);
6135 
6136 	/* Needed for registers to remove the register from its
6137 	   previous quantity's chain.
6138 	   Needed for memory if this is a nonvarying address, unless
6139 	   we have just done an invalidate_memory that covers even those.  */
6140 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6141 	  invalidate (dest, VOIDmode);
6142 	else if (MEM_P (dest))
6143 	  invalidate (dest, VOIDmode);
6144 	else if (GET_CODE (dest) == STRICT_LOW_PART
6145 		 || GET_CODE (dest) == ZERO_EXTRACT)
6146 	  invalidate (XEXP (dest, 0), GET_MODE (dest));
6147       }
6148 
6149   /* A volatile ASM invalidates everything.  */
6150   if (NONJUMP_INSN_P (insn)
6151       && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6152       && MEM_VOLATILE_P (PATTERN (insn)))
6153     flush_hash_table ();
6154 
6155   /* Make sure registers mentioned in destinations
6156      are safe for use in an expression to be inserted.
6157      This removes from the hash table
6158      any invalid entry that refers to one of these registers.
6159 
6160      We don't care about the return value from mention_regs because
6161      we are going to hash the SET_DEST values unconditionally.  */
6162 
6163   for (i = 0; i < n_sets; i++)
6164     {
6165       if (sets[i].rtl)
6166 	{
6167 	  rtx x = SET_DEST (sets[i].rtl);
6168 
6169 	  if (!REG_P (x))
6170 	    mention_regs (x);
6171 	  else
6172 	    {
6173 	      /* We used to rely on all references to a register becoming
6174 		 inaccessible when a register changes to a new quantity,
6175 		 since that changes the hash code.  However, that is not
6176 		 safe, since after HASH_SIZE new quantities we get a
6177 		 hash 'collision' of a register with its own invalid
6178 		 entries.  And since SUBREGs have been changed not to
6179 		 change their hash code with the hash code of the register,
6180 		 it wouldn't work any longer at all.  So we have to check
6181 		 for any invalid references lying around now.
6182 		 This code is similar to the REG case in mention_regs,
6183 		 but it knows that reg_tick has been incremented, and
6184 		 it leaves reg_in_table as -1 .  */
6185 	      unsigned int regno = REGNO (x);
6186 	      unsigned int endregno
6187 		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6188 			   : hard_regno_nregs[regno][GET_MODE (x)]);
6189 	      unsigned int i;
6190 
6191 	      for (i = regno; i < endregno; i++)
6192 		{
6193 		  if (REG_IN_TABLE (i) >= 0)
6194 		    {
6195 		      remove_invalid_refs (i);
6196 		      REG_IN_TABLE (i) = -1;
6197 		    }
6198 		}
6199 	    }
6200 	}
6201     }
6202 
6203   /* We may have just removed some of the src_elt's from the hash table.
6204      So replace each one with the current head of the same class.
6205      Also check if destination addresses have been removed.  */
6206 
6207   for (i = 0; i < n_sets; i++)
6208     if (sets[i].rtl)
6209       {
6210 	if (sets[i].dest_addr_elt
6211 	    && sets[i].dest_addr_elt->first_same_value == 0)
6212 	  {
6213 	    /* The elt was removed, which means this destination is not
6214 	       valid after this instruction.  */
6215 	    sets[i].rtl = NULL_RTX;
6216 	  }
6217 	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6218 	  /* If elt was removed, find current head of same class,
6219 	     or 0 if nothing remains of that class.  */
6220 	  {
6221 	    struct table_elt *elt = sets[i].src_elt;
6222 
6223 	    while (elt && elt->prev_same_value)
6224 	      elt = elt->prev_same_value;
6225 
6226 	    while (elt && elt->first_same_value == 0)
6227 	      elt = elt->next_same_value;
6228 	    sets[i].src_elt = elt ? elt->first_same_value : 0;
6229 	  }
6230       }
6231 
6232   /* Now insert the destinations into their equivalence classes.  */
6233 
6234   for (i = 0; i < n_sets; i++)
6235     if (sets[i].rtl)
6236       {
6237 	rtx dest = SET_DEST (sets[i].rtl);
6238 	struct table_elt *elt;
6239 
6240 	/* Don't record value if we are not supposed to risk allocating
6241 	   floating-point values in registers that might be wider than
6242 	   memory.  */
6243 	if ((flag_float_store
6244 	     && MEM_P (dest)
6245 	     && FLOAT_MODE_P (GET_MODE (dest)))
6246 	    /* Don't record BLKmode values, because we don't know the
6247 	       size of it, and can't be sure that other BLKmode values
6248 	       have the same or smaller size.  */
6249 	    || GET_MODE (dest) == BLKmode
6250 	    /* Don't record values of destinations set inside a libcall block
6251 	       since we might delete the libcall.  Things should have been set
6252 	       up so we won't want to reuse such a value, but we play it safe
6253 	       here.  */
6254 	    || libcall_insn
6255 	    /* If we didn't put a REG_EQUAL value or a source into the hash
6256 	       table, there is no point is recording DEST.  */
6257 	    || sets[i].src_elt == 0
6258 	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6259 	       or SIGN_EXTEND, don't record DEST since it can cause
6260 	       some tracking to be wrong.
6261 
6262 	       ??? Think about this more later.  */
6263 	    || (GET_CODE (dest) == SUBREG
6264 		&& (GET_MODE_SIZE (GET_MODE (dest))
6265 		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6266 		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
6267 		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6268 	  continue;
6269 
6270 	/* STRICT_LOW_PART isn't part of the value BEING set,
6271 	   and neither is the SUBREG inside it.
6272 	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6273 	if (GET_CODE (dest) == STRICT_LOW_PART)
6274 	  dest = SUBREG_REG (XEXP (dest, 0));
6275 
6276 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6277 	  /* Registers must also be inserted into chains for quantities.  */
6278 	  if (insert_regs (dest, sets[i].src_elt, 1))
6279 	    {
6280 	      /* If `insert_regs' changes something, the hash code must be
6281 		 recalculated.  */
6282 	      rehash_using_reg (dest);
6283 	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6284 	    }
6285 
6286 	elt = insert (dest, sets[i].src_elt,
6287 		      sets[i].dest_hash, GET_MODE (dest));
6288 
6289 	elt->in_memory = (MEM_P (sets[i].inner_dest)
6290 			  && !MEM_READONLY_P (sets[i].inner_dest));
6291 
6292 	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6293 	   narrower than M2, and both M1 and M2 are the same number of words,
6294 	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6295 	   make that equivalence as well.
6296 
6297 	   However, BAR may have equivalences for which gen_lowpart
6298 	   will produce a simpler value than gen_lowpart applied to
6299 	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6300 	   BAR's equivalences.  If we don't get a simplified form, make
6301 	   the SUBREG.  It will not be used in an equivalence, but will
6302 	   cause two similar assignments to be detected.
6303 
6304 	   Note the loop below will find SUBREG_REG (DEST) since we have
6305 	   already entered SRC and DEST of the SET in the table.  */
6306 
6307 	if (GET_CODE (dest) == SUBREG
6308 	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6309 		 / UNITS_PER_WORD)
6310 		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6311 	    && (GET_MODE_SIZE (GET_MODE (dest))
6312 		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6313 	    && sets[i].src_elt != 0)
6314 	  {
6315 	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6316 	    struct table_elt *elt, *classp = 0;
6317 
6318 	    for (elt = sets[i].src_elt->first_same_value; elt;
6319 		 elt = elt->next_same_value)
6320 	      {
6321 		rtx new_src = 0;
6322 		unsigned src_hash;
6323 		struct table_elt *src_elt;
6324 		int byte = 0;
6325 
6326 		/* Ignore invalid entries.  */
6327 		if (!REG_P (elt->exp)
6328 		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6329 		  continue;
6330 
6331 		/* We may have already been playing subreg games.  If the
6332 		   mode is already correct for the destination, use it.  */
6333 		if (GET_MODE (elt->exp) == new_mode)
6334 		  new_src = elt->exp;
6335 		else
6336 		  {
6337 		    /* Calculate big endian correction for the SUBREG_BYTE.
6338 		       We have already checked that M1 (GET_MODE (dest))
6339 		       is not narrower than M2 (new_mode).  */
6340 		    if (BYTES_BIG_ENDIAN)
6341 		      byte = (GET_MODE_SIZE (GET_MODE (dest))
6342 			      - GET_MODE_SIZE (new_mode));
6343 
6344 		    new_src = simplify_gen_subreg (new_mode, elt->exp,
6345 					           GET_MODE (dest), byte);
6346 		  }
6347 
6348 		/* The call to simplify_gen_subreg fails if the value
6349 		   is VOIDmode, yet we can't do any simplification, e.g.
6350 		   for EXPR_LISTs denoting function call results.
6351 		   It is invalid to construct a SUBREG with a VOIDmode
6352 		   SUBREG_REG, hence a zero new_src means we can't do
6353 		   this substitution.  */
6354 		if (! new_src)
6355 		  continue;
6356 
6357 		src_hash = HASH (new_src, new_mode);
6358 		src_elt = lookup (new_src, src_hash, new_mode);
6359 
6360 		/* Put the new source in the hash table is if isn't
6361 		   already.  */
6362 		if (src_elt == 0)
6363 		  {
6364 		    if (insert_regs (new_src, classp, 0))
6365 		      {
6366 			rehash_using_reg (new_src);
6367 			src_hash = HASH (new_src, new_mode);
6368 		      }
6369 		    src_elt = insert (new_src, classp, src_hash, new_mode);
6370 		    src_elt->in_memory = elt->in_memory;
6371 		  }
6372 		else if (classp && classp != src_elt->first_same_value)
6373 		  /* Show that two things that we've seen before are
6374 		     actually the same.  */
6375 		  merge_equiv_classes (src_elt, classp);
6376 
6377 		classp = src_elt->first_same_value;
6378 		/* Ignore invalid entries.  */
6379 		while (classp
6380 		       && !REG_P (classp->exp)
6381 		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6382 		  classp = classp->next_same_value;
6383 	      }
6384 	  }
6385       }
6386 
6387   /* Special handling for (set REG0 REG1) where REG0 is the
6388      "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6389      be used in the sequel, so (if easily done) change this insn to
6390      (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6391      that computed their value.  Then REG1 will become a dead store
6392      and won't cloud the situation for later optimizations.
6393 
6394      Do not make this change if REG1 is a hard register, because it will
6395      then be used in the sequel and we may be changing a two-operand insn
6396      into a three-operand insn.
6397 
6398      Also do not do this if we are operating on a copy of INSN.
6399 
6400      Also don't do this if INSN ends a libcall; this would cause an unrelated
6401      register to be set in the middle of a libcall, and we then get bad code
6402      if the libcall is deleted.  */
6403 
6404   if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6405       && NEXT_INSN (PREV_INSN (insn)) == insn
6406       && REG_P (SET_SRC (sets[0].rtl))
6407       && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6408       && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6409     {
6410       int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6411       struct qty_table_elem *src_ent = &qty_table[src_q];
6412 
6413       if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6414 	  && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6415 	{
6416 	  rtx prev = insn;
6417 	  /* Scan for the previous nonnote insn, but stop at a basic
6418 	     block boundary.  */
6419 	  do
6420 	    {
6421 	      prev = PREV_INSN (prev);
6422 	    }
6423 	  while (prev && NOTE_P (prev)
6424 		 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6425 
6426 	  /* Do not swap the registers around if the previous instruction
6427 	     attaches a REG_EQUIV note to REG1.
6428 
6429 	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
6430 	     from the pseudo that originally shadowed an incoming argument
6431 	     to another register.  Some uses of REG_EQUIV might rely on it
6432 	     being attached to REG1 rather than REG2.
6433 
6434 	     This section previously turned the REG_EQUIV into a REG_EQUAL
6435 	     note.  We cannot do that because REG_EQUIV may provide an
6436 	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
6437 
6438 	  if (prev != 0 && NONJUMP_INSN_P (prev)
6439 	      && GET_CODE (PATTERN (prev)) == SET
6440 	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6441 	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6442 	    {
6443 	      rtx dest = SET_DEST (sets[0].rtl);
6444 	      rtx src = SET_SRC (sets[0].rtl);
6445 	      rtx note;
6446 
6447 	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6448 	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6449 	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6450 	      apply_change_group ();
6451 
6452 	      /* If INSN has a REG_EQUAL note, and this note mentions
6453 		 REG0, then we must delete it, because the value in
6454 		 REG0 has changed.  If the note's value is REG1, we must
6455 		 also delete it because that is now this insn's dest.  */
6456 	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6457 	      if (note != 0
6458 		  && (reg_mentioned_p (dest, XEXP (note, 0))
6459 		      || rtx_equal_p (src, XEXP (note, 0))))
6460 		remove_note (insn, note);
6461 	    }
6462 	}
6463     }
6464 
6465   /* If this is a conditional jump insn, record any known equivalences due to
6466      the condition being tested.  */
6467 
6468   if (JUMP_P (insn)
6469       && n_sets == 1 && GET_CODE (x) == SET
6470       && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6471     record_jump_equiv (insn, 0);
6472 
6473 #ifdef HAVE_cc0
6474   /* If the previous insn set CC0 and this insn no longer references CC0,
6475      delete the previous insn.  Here we use the fact that nothing expects CC0
6476      to be valid over an insn, which is true until the final pass.  */
6477   if (prev_insn && NONJUMP_INSN_P (prev_insn)
6478       && (tem = single_set (prev_insn)) != 0
6479       && SET_DEST (tem) == cc0_rtx
6480       && ! reg_mentioned_p (cc0_rtx, x))
6481     delete_insn (prev_insn);
6482 
6483   prev_insn_cc0 = this_insn_cc0;
6484   prev_insn_cc0_mode = this_insn_cc0_mode;
6485   prev_insn = insn;
6486 #endif
6487 }
6488 
6489 /* Remove from the hash table all expressions that reference memory.  */
6490 
6491 static void
invalidate_memory(void)6492 invalidate_memory (void)
6493 {
6494   int i;
6495   struct table_elt *p, *next;
6496 
6497   for (i = 0; i < HASH_SIZE; i++)
6498     for (p = table[i]; p; p = next)
6499       {
6500 	next = p->next_same_hash;
6501 	if (p->in_memory)
6502 	  remove_from_table (p, i);
6503       }
6504 }
6505 
6506 /* If ADDR is an address that implicitly affects the stack pointer, return
6507    1 and update the register tables to show the effect.  Else, return 0.  */
6508 
6509 static int
addr_affects_sp_p(rtx addr)6510 addr_affects_sp_p (rtx addr)
6511 {
6512   if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6513       && REG_P (XEXP (addr, 0))
6514       && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6515     {
6516       if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6517 	{
6518 	  REG_TICK (STACK_POINTER_REGNUM)++;
6519 	  /* Is it possible to use a subreg of SP?  */
6520 	  SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6521 	}
6522 
6523       /* This should be *very* rare.  */
6524       if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6525 	invalidate (stack_pointer_rtx, VOIDmode);
6526 
6527       return 1;
6528     }
6529 
6530   return 0;
6531 }
6532 
6533 /* Perform invalidation on the basis of everything about an insn
6534    except for invalidating the actual places that are SET in it.
6535    This includes the places CLOBBERed, and anything that might
6536    alias with something that is SET or CLOBBERed.
6537 
6538    X is the pattern of the insn.  */
6539 
6540 static void
invalidate_from_clobbers(rtx x)6541 invalidate_from_clobbers (rtx x)
6542 {
6543   if (GET_CODE (x) == CLOBBER)
6544     {
6545       rtx ref = XEXP (x, 0);
6546       if (ref)
6547 	{
6548 	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
6549 	      || MEM_P (ref))
6550 	    invalidate (ref, VOIDmode);
6551 	  else if (GET_CODE (ref) == STRICT_LOW_PART
6552 		   || GET_CODE (ref) == ZERO_EXTRACT)
6553 	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6554 	}
6555     }
6556   else if (GET_CODE (x) == PARALLEL)
6557     {
6558       int i;
6559       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6560 	{
6561 	  rtx y = XVECEXP (x, 0, i);
6562 	  if (GET_CODE (y) == CLOBBER)
6563 	    {
6564 	      rtx ref = XEXP (y, 0);
6565 	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
6566 		  || MEM_P (ref))
6567 		invalidate (ref, VOIDmode);
6568 	      else if (GET_CODE (ref) == STRICT_LOW_PART
6569 		       || GET_CODE (ref) == ZERO_EXTRACT)
6570 		invalidate (XEXP (ref, 0), GET_MODE (ref));
6571 	    }
6572 	}
6573     }
6574 }
6575 
6576 /* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6577    and replace any registers in them with either an equivalent constant
6578    or the canonical form of the register.  If we are inside an address,
6579    only do this if the address remains valid.
6580 
6581    OBJECT is 0 except when within a MEM in which case it is the MEM.
6582 
6583    Return the replacement for X.  */
6584 
6585 static rtx
cse_process_notes(rtx x,rtx object)6586 cse_process_notes (rtx x, rtx object)
6587 {
6588   enum rtx_code code = GET_CODE (x);
6589   const char *fmt = GET_RTX_FORMAT (code);
6590   int i;
6591 
6592   switch (code)
6593     {
6594     case CONST_INT:
6595     case CONST:
6596     case SYMBOL_REF:
6597     case LABEL_REF:
6598     case CONST_DOUBLE:
6599     case CONST_VECTOR:
6600     case PC:
6601     case CC0:
6602     case LO_SUM:
6603       return x;
6604 
6605     case MEM:
6606       validate_change (x, &XEXP (x, 0),
6607 		       cse_process_notes (XEXP (x, 0), x), 0);
6608       return x;
6609 
6610     case EXPR_LIST:
6611     case INSN_LIST:
6612       if (REG_NOTE_KIND (x) == REG_EQUAL)
6613 	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6614       if (XEXP (x, 1))
6615 	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6616       return x;
6617 
6618     case SIGN_EXTEND:
6619     case ZERO_EXTEND:
6620     case SUBREG:
6621       {
6622 	rtx new = cse_process_notes (XEXP (x, 0), object);
6623 	/* We don't substitute VOIDmode constants into these rtx,
6624 	   since they would impede folding.  */
6625 	if (GET_MODE (new) != VOIDmode)
6626 	  validate_change (object, &XEXP (x, 0), new, 0);
6627 	return x;
6628       }
6629 
6630     case REG:
6631       i = REG_QTY (REGNO (x));
6632 
6633       /* Return a constant or a constant register.  */
6634       if (REGNO_QTY_VALID_P (REGNO (x)))
6635 	{
6636 	  struct qty_table_elem *ent = &qty_table[i];
6637 
6638 	  if (ent->const_rtx != NULL_RTX
6639 	      && (CONSTANT_P (ent->const_rtx)
6640 		  || REG_P (ent->const_rtx)))
6641 	    {
6642 	      rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6643 	      if (new)
6644 		return new;
6645 	    }
6646 	}
6647 
6648       /* Otherwise, canonicalize this register.  */
6649       return canon_reg (x, NULL_RTX);
6650 
6651     default:
6652       break;
6653     }
6654 
6655   for (i = 0; i < GET_RTX_LENGTH (code); i++)
6656     if (fmt[i] == 'e')
6657       validate_change (object, &XEXP (x, i),
6658 		       cse_process_notes (XEXP (x, i), object), 0);
6659 
6660   return x;
6661 }
6662 
6663 /* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6664    since they are done elsewhere.  This function is called via note_stores.  */
6665 
6666 static void
invalidate_skipped_set(rtx dest,rtx set,void * data ATTRIBUTE_UNUSED)6667 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6668 {
6669   enum rtx_code code = GET_CODE (dest);
6670 
6671   if (code == MEM
6672       && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
6673       /* There are times when an address can appear varying and be a PLUS
6674 	 during this scan when it would be a fixed address were we to know
6675 	 the proper equivalences.  So invalidate all memory if there is
6676 	 a BLKmode or nonscalar memory reference or a reference to a
6677 	 variable address.  */
6678       && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6679 	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6680     {
6681       invalidate_memory ();
6682       return;
6683     }
6684 
6685   if (GET_CODE (set) == CLOBBER
6686       || CC0_P (dest)
6687       || dest == pc_rtx)
6688     return;
6689 
6690   if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6691     invalidate (XEXP (dest, 0), GET_MODE (dest));
6692   else if (code == REG || code == SUBREG || code == MEM)
6693     invalidate (dest, VOIDmode);
6694 }
6695 
6696 /* Invalidate all insns from START up to the end of the function or the
6697    next label.  This called when we wish to CSE around a block that is
6698    conditionally executed.  */
6699 
6700 static void
invalidate_skipped_block(rtx start)6701 invalidate_skipped_block (rtx start)
6702 {
6703   rtx insn;
6704 
6705   for (insn = start; insn && !LABEL_P (insn);
6706        insn = NEXT_INSN (insn))
6707     {
6708       if (! INSN_P (insn))
6709 	continue;
6710 
6711       if (CALL_P (insn))
6712 	{
6713 	  if (! CONST_OR_PURE_CALL_P (insn))
6714 	    invalidate_memory ();
6715 	  invalidate_for_call ();
6716 	}
6717 
6718       invalidate_from_clobbers (PATTERN (insn));
6719       note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6720     }
6721 }
6722 
6723 /* Find the end of INSN's basic block and return its range,
6724    the total number of SETs in all the insns of the block, the last insn of the
6725    block, and the branch path.
6726 
6727    The branch path indicates which branches should be followed.  If a nonzero
6728    path size is specified, the block should be rescanned and a different set
6729    of branches will be taken.  The branch path is only used if
6730    FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6731 
6732    DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6733    used to describe the block.  It is filled in with the information about
6734    the current block.  The incoming structure's branch path, if any, is used
6735    to construct the output branch path.  */
6736 
6737 static void
cse_end_of_basic_block(rtx insn,struct cse_basic_block_data * data,int follow_jumps,int skip_blocks)6738 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6739 			int follow_jumps, int skip_blocks)
6740 {
6741   rtx p = insn, q;
6742   int nsets = 0;
6743   int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6744   rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6745   int path_size = data->path_size;
6746   int path_entry = 0;
6747   int i;
6748 
6749   /* Update the previous branch path, if any.  If the last branch was
6750      previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6751      If it was previously PATH_NOT_TAKEN,
6752      shorten the path by one and look at the previous branch.  We know that
6753      at least one branch must have been taken if PATH_SIZE is nonzero.  */
6754   while (path_size > 0)
6755     {
6756       if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6757 	{
6758 	  data->path[path_size - 1].status = PATH_NOT_TAKEN;
6759 	  break;
6760 	}
6761       else
6762 	path_size--;
6763     }
6764 
6765   /* If the first instruction is marked with QImode, that means we've
6766      already processed this block.  Our caller will look at DATA->LAST
6767      to figure out where to go next.  We want to return the next block
6768      in the instruction stream, not some branched-to block somewhere
6769      else.  We accomplish this by pretending our called forbid us to
6770      follow jumps, or skip blocks.  */
6771   if (GET_MODE (insn) == QImode)
6772     follow_jumps = skip_blocks = 0;
6773 
6774   /* Scan to end of this basic block.  */
6775   while (p && !LABEL_P (p))
6776     {
6777       /* Don't cse over a call to setjmp; on some machines (eg VAX)
6778 	 the regs restored by the longjmp come from
6779 	 a later time than the setjmp.  */
6780       if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6781 	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6782 	break;
6783 
6784       /* A PARALLEL can have lots of SETs in it,
6785 	 especially if it is really an ASM_OPERANDS.  */
6786       if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6787 	nsets += XVECLEN (PATTERN (p), 0);
6788       else if (!NOTE_P (p))
6789 	nsets += 1;
6790 
6791       /* Ignore insns made by CSE; they cannot affect the boundaries of
6792 	 the basic block.  */
6793 
6794       if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6795 	high_cuid = INSN_CUID (p);
6796       if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6797 	low_cuid = INSN_CUID (p);
6798 
6799       /* See if this insn is in our branch path.  If it is and we are to
6800 	 take it, do so.  */
6801       if (path_entry < path_size && data->path[path_entry].branch == p)
6802 	{
6803 	  if (data->path[path_entry].status != PATH_NOT_TAKEN)
6804 	    p = JUMP_LABEL (p);
6805 
6806 	  /* Point to next entry in path, if any.  */
6807 	  path_entry++;
6808 	}
6809 
6810       /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6811 	 was specified, we haven't reached our maximum path length, there are
6812 	 insns following the target of the jump, this is the only use of the
6813 	 jump label, and the target label is preceded by a BARRIER.
6814 
6815 	 Alternatively, we can follow the jump if it branches around a
6816 	 block of code and there are no other branches into the block.
6817 	 In this case invalidate_skipped_block will be called to invalidate any
6818 	 registers set in the block when following the jump.  */
6819 
6820       else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6821 	       && JUMP_P (p)
6822 	       && GET_CODE (PATTERN (p)) == SET
6823 	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6824 	       && JUMP_LABEL (p) != 0
6825 	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
6826 	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
6827 	{
6828 	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6829 	    if ((!NOTE_P (q)
6830 		 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6831 		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6832 		&& (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6833 	      break;
6834 
6835 	  /* If we ran into a BARRIER, this code is an extension of the
6836 	     basic block when the branch is taken.  */
6837 	  if (follow_jumps && q != 0 && BARRIER_P (q))
6838 	    {
6839 	      /* Don't allow ourself to keep walking around an
6840 		 always-executed loop.  */
6841 	      if (next_real_insn (q) == next)
6842 		{
6843 		  p = NEXT_INSN (p);
6844 		  continue;
6845 		}
6846 
6847 	      /* Similarly, don't put a branch in our path more than once.  */
6848 	      for (i = 0; i < path_entry; i++)
6849 		if (data->path[i].branch == p)
6850 		  break;
6851 
6852 	      if (i != path_entry)
6853 		break;
6854 
6855 	      data->path[path_entry].branch = p;
6856 	      data->path[path_entry++].status = PATH_TAKEN;
6857 
6858 	      /* This branch now ends our path.  It was possible that we
6859 		 didn't see this branch the last time around (when the
6860 		 insn in front of the target was a JUMP_INSN that was
6861 		 turned into a no-op).  */
6862 	      path_size = path_entry;
6863 
6864 	      p = JUMP_LABEL (p);
6865 	      /* Mark block so we won't scan it again later.  */
6866 	      PUT_MODE (NEXT_INSN (p), QImode);
6867 	    }
6868 	  /* Detect a branch around a block of code.  */
6869 	  else if (skip_blocks && q != 0 && !LABEL_P (q))
6870 	    {
6871 	      rtx tmp;
6872 
6873 	      if (next_real_insn (q) == next)
6874 		{
6875 		  p = NEXT_INSN (p);
6876 		  continue;
6877 		}
6878 
6879 	      for (i = 0; i < path_entry; i++)
6880 		if (data->path[i].branch == p)
6881 		  break;
6882 
6883 	      if (i != path_entry)
6884 		break;
6885 
6886 	      /* This is no_labels_between_p (p, q) with an added check for
6887 		 reaching the end of a function (in case Q precedes P).  */
6888 	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6889 		if (LABEL_P (tmp))
6890 		  break;
6891 
6892 	      if (tmp == q)
6893 		{
6894 		  data->path[path_entry].branch = p;
6895 		  data->path[path_entry++].status = PATH_AROUND;
6896 
6897 		  path_size = path_entry;
6898 
6899 		  p = JUMP_LABEL (p);
6900 		  /* Mark block so we won't scan it again later.  */
6901 		  PUT_MODE (NEXT_INSN (p), QImode);
6902 		}
6903 	    }
6904 	}
6905       p = NEXT_INSN (p);
6906     }
6907 
6908   data->low_cuid = low_cuid;
6909   data->high_cuid = high_cuid;
6910   data->nsets = nsets;
6911   data->last = p;
6912 
6913   /* If all jumps in the path are not taken, set our path length to zero
6914      so a rescan won't be done.  */
6915   for (i = path_size - 1; i >= 0; i--)
6916     if (data->path[i].status != PATH_NOT_TAKEN)
6917       break;
6918 
6919   if (i == -1)
6920     data->path_size = 0;
6921   else
6922     data->path_size = path_size;
6923 
6924   /* End the current branch path.  */
6925   data->path[path_size].branch = 0;
6926 }
6927 
6928 /* Perform cse on the instructions of a function.
6929    F is the first instruction.
6930    NREGS is one plus the highest pseudo-reg number used in the instruction.
6931 
6932    Returns 1 if jump_optimize should be redone due to simplifications
6933    in conditional jump instructions.  */
6934 
6935 int
cse_main(rtx f,int nregs)6936 cse_main (rtx f, int nregs)
6937 {
6938   struct cse_basic_block_data val;
6939   rtx insn = f;
6940   int i;
6941 
6942   init_cse_reg_info (nregs);
6943 
6944   val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6945 
6946   cse_jumps_altered = 0;
6947   recorded_label_ref = 0;
6948   constant_pool_entries_cost = 0;
6949   constant_pool_entries_regcost = 0;
6950   val.path_size = 0;
6951   rtl_hooks = cse_rtl_hooks;
6952 
6953   init_recog ();
6954   init_alias_analysis ();
6955 
6956   reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6957 
6958   /* Find the largest uid.  */
6959 
6960   max_uid = get_max_uid ();
6961   uid_cuid = XCNEWVEC (int, max_uid + 1);
6962 
6963   /* Compute the mapping from uids to cuids.
6964      CUIDs are numbers assigned to insns, like uids,
6965      except that cuids increase monotonically through the code.
6966      Don't assign cuids to line-number NOTEs, so that the distance in cuids
6967      between two insns is not affected by -g.  */
6968 
6969   for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6970     {
6971       if (!NOTE_P (insn)
6972 	  || NOTE_LINE_NUMBER (insn) < 0)
6973 	INSN_CUID (insn) = ++i;
6974       else
6975 	/* Give a line number note the same cuid as preceding insn.  */
6976 	INSN_CUID (insn) = i;
6977     }
6978 
6979   /* Loop over basic blocks.
6980      Compute the maximum number of qty's needed for each basic block
6981      (which is 2 for each SET).  */
6982   insn = f;
6983   while (insn)
6984     {
6985       cse_altered = 0;
6986       cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6987 			      flag_cse_skip_blocks);
6988 
6989       /* If this basic block was already processed or has no sets, skip it.  */
6990       if (val.nsets == 0 || GET_MODE (insn) == QImode)
6991 	{
6992 	  PUT_MODE (insn, VOIDmode);
6993 	  insn = (val.last ? NEXT_INSN (val.last) : 0);
6994 	  val.path_size = 0;
6995 	  continue;
6996 	}
6997 
6998       cse_basic_block_start = val.low_cuid;
6999       cse_basic_block_end = val.high_cuid;
7000       max_qty = val.nsets * 2;
7001 
7002       if (dump_file)
7003 	fprintf (dump_file, ";; Processing block from %d to %d, %d sets.\n",
7004 		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7005 		 val.nsets);
7006 
7007       /* Make MAX_QTY bigger to give us room to optimize
7008 	 past the end of this basic block, if that should prove useful.  */
7009       if (max_qty < 500)
7010 	max_qty = 500;
7011 
7012       /* If this basic block is being extended by following certain jumps,
7013          (see `cse_end_of_basic_block'), we reprocess the code from the start.
7014          Otherwise, we start after this basic block.  */
7015       if (val.path_size > 0)
7016 	cse_basic_block (insn, val.last, val.path);
7017       else
7018 	{
7019 	  int old_cse_jumps_altered = cse_jumps_altered;
7020 	  rtx temp;
7021 
7022 	  /* When cse changes a conditional jump to an unconditional
7023 	     jump, we want to reprocess the block, since it will give
7024 	     us a new branch path to investigate.  */
7025 	  cse_jumps_altered = 0;
7026 	  temp = cse_basic_block (insn, val.last, val.path);
7027 	  if (cse_jumps_altered == 0
7028 	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7029 	    insn = temp;
7030 
7031 	  cse_jumps_altered |= old_cse_jumps_altered;
7032 	}
7033 
7034       if (cse_altered)
7035 	ggc_collect ();
7036 
7037 #ifdef USE_C_ALLOCA
7038       alloca (0);
7039 #endif
7040     }
7041 
7042   /* Clean up.  */
7043   end_alias_analysis ();
7044   free (uid_cuid);
7045   free (reg_eqv_table);
7046   free (val.path);
7047   rtl_hooks = general_rtl_hooks;
7048 
7049   return cse_jumps_altered || recorded_label_ref;
7050 }
7051 
7052 /* Process a single basic block.  FROM and TO and the limits of the basic
7053    block.  NEXT_BRANCH points to the branch path when following jumps or
7054    a null path when not following jumps.  */
7055 
7056 static rtx
cse_basic_block(rtx from,rtx to,struct branch_path * next_branch)7057 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
7058 {
7059   rtx insn;
7060   int to_usage = 0;
7061   rtx libcall_insn = NULL_RTX;
7062   int num_insns = 0;
7063   int no_conflict = 0;
7064 
7065   /* Allocate the space needed by qty_table.  */
7066   qty_table = XNEWVEC (struct qty_table_elem, max_qty);
7067 
7068   new_basic_block ();
7069 
7070   /* TO might be a label.  If so, protect it from being deleted.  */
7071   if (to != 0 && LABEL_P (to))
7072     ++LABEL_NUSES (to);
7073 
7074   for (insn = from; insn != to; insn = NEXT_INSN (insn))
7075     {
7076       enum rtx_code code = GET_CODE (insn);
7077 
7078       /* If we have processed 1,000 insns, flush the hash table to
7079 	 avoid extreme quadratic behavior.  We must not include NOTEs
7080 	 in the count since there may be more of them when generating
7081 	 debugging information.  If we clear the table at different
7082 	 times, code generated with -g -O might be different than code
7083 	 generated with -O but not -g.
7084 
7085 	 ??? This is a real kludge and needs to be done some other way.
7086 	 Perhaps for 2.9.  */
7087       if (code != NOTE && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
7088 	{
7089 	  flush_hash_table ();
7090 	  num_insns = 0;
7091 	}
7092 
7093       /* See if this is a branch that is part of the path.  If so, and it is
7094 	 to be taken, do so.  */
7095       if (next_branch->branch == insn)
7096 	{
7097 	  enum taken status = next_branch++->status;
7098 	  if (status != PATH_NOT_TAKEN)
7099 	    {
7100 	      if (status == PATH_TAKEN)
7101 		record_jump_equiv (insn, 1);
7102 	      else
7103 		invalidate_skipped_block (NEXT_INSN (insn));
7104 
7105 	      /* Set the last insn as the jump insn; it doesn't affect cc0.
7106 		 Then follow this branch.  */
7107 #ifdef HAVE_cc0
7108 	      prev_insn_cc0 = 0;
7109 	      prev_insn = insn;
7110 #endif
7111 	      insn = JUMP_LABEL (insn);
7112 	      continue;
7113 	    }
7114 	}
7115 
7116       if (GET_MODE (insn) == QImode)
7117 	PUT_MODE (insn, VOIDmode);
7118 
7119       if (GET_RTX_CLASS (code) == RTX_INSN)
7120 	{
7121 	  rtx p;
7122 
7123 	  /* Process notes first so we have all notes in canonical forms when
7124 	     looking for duplicate operations.  */
7125 
7126 	  if (REG_NOTES (insn))
7127 	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7128 
7129 	  /* Track when we are inside in LIBCALL block.  Inside such a block,
7130 	     we do not want to record destinations.  The last insn of a
7131 	     LIBCALL block is not considered to be part of the block, since
7132 	     its destination is the result of the block and hence should be
7133 	     recorded.  */
7134 
7135 	  if (REG_NOTES (insn) != 0)
7136 	    {
7137 	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7138 		libcall_insn = XEXP (p, 0);
7139 	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7140 		{
7141 		  /* Keep libcall_insn for the last SET insn of a no-conflict
7142 		     block to prevent changing the destination.  */
7143 		  if (! no_conflict)
7144 		    libcall_insn = 0;
7145 		  else
7146 		    no_conflict = -1;
7147 		}
7148 	      else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7149 		no_conflict = 1;
7150 	    }
7151 
7152 	  cse_insn (insn, libcall_insn);
7153 
7154 	  if (no_conflict == -1)
7155 	    {
7156 	      libcall_insn = 0;
7157 	      no_conflict = 0;
7158 	    }
7159 
7160 	  /* If we haven't already found an insn where we added a LABEL_REF,
7161 	     check this one.  */
7162 	  if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
7163 	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7164 			       (void *) insn))
7165 	    recorded_label_ref = 1;
7166 	}
7167 
7168       /* If INSN is now an unconditional jump, skip to the end of our
7169 	 basic block by pretending that we just did the last insn in the
7170 	 basic block.  If we are jumping to the end of our block, show
7171 	 that we can have one usage of TO.  */
7172 
7173       if (any_uncondjump_p (insn))
7174 	{
7175 	  if (to == 0)
7176 	    {
7177 	      free (qty_table);
7178 	      return 0;
7179 	    }
7180 
7181 	  if (JUMP_LABEL (insn) == to)
7182 	    to_usage = 1;
7183 
7184 	  /* Maybe TO was deleted because the jump is unconditional.
7185 	     If so, there is nothing left in this basic block.  */
7186 	  /* ??? Perhaps it would be smarter to set TO
7187 	     to whatever follows this insn,
7188 	     and pretend the basic block had always ended here.  */
7189 	  if (INSN_DELETED_P (to))
7190 	    break;
7191 
7192 	  insn = PREV_INSN (to);
7193 	}
7194 
7195       /* See if it is ok to keep on going past the label
7196 	 which used to end our basic block.  Remember that we incremented
7197 	 the count of that label, so we decrement it here.  If we made
7198 	 a jump unconditional, TO_USAGE will be one; in that case, we don't
7199 	 want to count the use in that jump.  */
7200 
7201       if (to != 0 && NEXT_INSN (insn) == to
7202 	  && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
7203 	{
7204 	  struct cse_basic_block_data val;
7205 	  rtx prev;
7206 
7207 	  insn = NEXT_INSN (to);
7208 
7209 	  /* If TO was the last insn in the function, we are done.  */
7210 	  if (insn == 0)
7211 	    {
7212 	      free (qty_table);
7213 	      return 0;
7214 	    }
7215 
7216 	  /* If TO was preceded by a BARRIER we are done with this block
7217 	     because it has no continuation.  */
7218 	  prev = prev_nonnote_insn (to);
7219 	  if (prev && BARRIER_P (prev))
7220 	    {
7221 	      free (qty_table);
7222 	      return insn;
7223 	    }
7224 
7225 	  /* Find the end of the following block.  Note that we won't be
7226 	     following branches in this case.  */
7227 	  to_usage = 0;
7228 	  val.path_size = 0;
7229 	  val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7230 	  cse_end_of_basic_block (insn, &val, 0, 0);
7231 	  free (val.path);
7232 
7233 	  /* If the tables we allocated have enough space left
7234 	     to handle all the SETs in the next basic block,
7235 	     continue through it.  Otherwise, return,
7236 	     and that block will be scanned individually.  */
7237 	  if (val.nsets * 2 + next_qty > max_qty)
7238 	    break;
7239 
7240 	  cse_basic_block_start = val.low_cuid;
7241 	  cse_basic_block_end = val.high_cuid;
7242 	  to = val.last;
7243 
7244 	  /* Prevent TO from being deleted if it is a label.  */
7245 	  if (to != 0 && LABEL_P (to))
7246 	    ++LABEL_NUSES (to);
7247 
7248 	  /* Back up so we process the first insn in the extension.  */
7249 	  insn = PREV_INSN (insn);
7250 	}
7251     }
7252 
7253   gcc_assert (next_qty <= max_qty);
7254 
7255   free (qty_table);
7256 
7257   return to ? NEXT_INSN (to) : 0;
7258 }
7259 
7260 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7261    there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7262 
7263 static int
check_for_label_ref(rtx * rtl,void * data)7264 check_for_label_ref (rtx *rtl, void *data)
7265 {
7266   rtx insn = (rtx) data;
7267 
7268   /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7269      we must rerun jump since it needs to place the note.  If this is a
7270      LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7271      since no REG_LABEL will be added.  */
7272   return (GET_CODE (*rtl) == LABEL_REF
7273 	  && ! LABEL_REF_NONLOCAL_P (*rtl)
7274 	  && LABEL_P (XEXP (*rtl, 0))
7275 	  && INSN_UID (XEXP (*rtl, 0)) != 0
7276 	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7277 }
7278 
7279 /* Count the number of times registers are used (not set) in X.
7280    COUNTS is an array in which we accumulate the count, INCR is how much
7281    we count each register usage.
7282 
7283    Don't count a usage of DEST, which is the SET_DEST of a SET which
7284    contains X in its SET_SRC.  This is because such a SET does not
7285    modify the liveness of DEST.
7286    DEST is set to pc_rtx for a trapping insn, which means that we must count
7287    uses of a SET_DEST regardless because the insn can't be deleted here.  */
7288 
7289 static void
count_reg_usage(rtx x,int * counts,rtx dest,int incr)7290 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
7291 {
7292   enum rtx_code code;
7293   rtx note;
7294   const char *fmt;
7295   int i, j;
7296 
7297   if (x == 0)
7298     return;
7299 
7300   switch (code = GET_CODE (x))
7301     {
7302     case REG:
7303       if (x != dest)
7304 	counts[REGNO (x)] += incr;
7305       return;
7306 
7307     case PC:
7308     case CC0:
7309     case CONST:
7310     case CONST_INT:
7311     case CONST_DOUBLE:
7312     case CONST_VECTOR:
7313     case SYMBOL_REF:
7314     case LABEL_REF:
7315       return;
7316 
7317     case CLOBBER:
7318       /* If we are clobbering a MEM, mark any registers inside the address
7319          as being used.  */
7320       if (MEM_P (XEXP (x, 0)))
7321 	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7322       return;
7323 
7324     case SET:
7325       /* Unless we are setting a REG, count everything in SET_DEST.  */
7326       if (!REG_P (SET_DEST (x)))
7327 	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7328       count_reg_usage (SET_SRC (x), counts,
7329 		       dest ? dest : SET_DEST (x),
7330 		       incr);
7331       return;
7332 
7333     case CALL_INSN:
7334     case INSN:
7335     case JUMP_INSN:
7336     /* We expect dest to be NULL_RTX here.  If the insn may trap, mark
7337        this fact by setting DEST to pc_rtx.  */
7338       if (flag_non_call_exceptions && may_trap_p (PATTERN (x)))
7339 	dest = pc_rtx;
7340       if (code == CALL_INSN)
7341 	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
7342       count_reg_usage (PATTERN (x), counts, dest, incr);
7343 
7344       /* Things used in a REG_EQUAL note aren't dead since loop may try to
7345 	 use them.  */
7346 
7347       note = find_reg_equal_equiv_note (x);
7348       if (note)
7349 	{
7350 	  rtx eqv = XEXP (note, 0);
7351 
7352 	  if (GET_CODE (eqv) == EXPR_LIST)
7353 	  /* This REG_EQUAL note describes the result of a function call.
7354 	     Process all the arguments.  */
7355 	    do
7356 	      {
7357 		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
7358 		eqv = XEXP (eqv, 1);
7359 	      }
7360 	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
7361 	  else
7362 	    count_reg_usage (eqv, counts, dest, incr);
7363 	}
7364       return;
7365 
7366     case EXPR_LIST:
7367       if (REG_NOTE_KIND (x) == REG_EQUAL
7368 	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7369 	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7370 	     involving registers in the address.  */
7371 	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
7372 	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7373 
7374       count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7375       return;
7376 
7377     case ASM_OPERANDS:
7378       /* If the asm is volatile, then this insn cannot be deleted,
7379 	 and so the inputs *must* be live.  */
7380       if (MEM_VOLATILE_P (x))
7381 	dest = NULL_RTX;
7382       /* Iterate over just the inputs, not the constraints as well.  */
7383       for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7384 	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
7385       return;
7386 
7387     case INSN_LIST:
7388       gcc_unreachable ();
7389 
7390     default:
7391       break;
7392     }
7393 
7394   fmt = GET_RTX_FORMAT (code);
7395   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7396     {
7397       if (fmt[i] == 'e')
7398 	count_reg_usage (XEXP (x, i), counts, dest, incr);
7399       else if (fmt[i] == 'E')
7400 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7401 	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7402     }
7403 }
7404 
7405 /* Return true if set is live.  */
7406 static bool
set_live_p(rtx set,rtx insn ATTRIBUTE_UNUSED,int * counts)7407 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
7408 	    int *counts)
7409 {
7410 #ifdef HAVE_cc0
7411   rtx tem;
7412 #endif
7413 
7414   if (set_noop_p (set))
7415     ;
7416 
7417 #ifdef HAVE_cc0
7418   else if (GET_CODE (SET_DEST (set)) == CC0
7419 	   && !side_effects_p (SET_SRC (set))
7420 	   && ((tem = next_nonnote_insn (insn)) == 0
7421 	       || !INSN_P (tem)
7422 	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7423     return false;
7424 #endif
7425   else if (!REG_P (SET_DEST (set))
7426 	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7427 	   || counts[REGNO (SET_DEST (set))] != 0
7428 	   || side_effects_p (SET_SRC (set)))
7429     return true;
7430   return false;
7431 }
7432 
7433 /* Return true if insn is live.  */
7434 
7435 static bool
insn_live_p(rtx insn,int * counts)7436 insn_live_p (rtx insn, int *counts)
7437 {
7438   int i;
7439   if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7440     return true;
7441   else if (GET_CODE (PATTERN (insn)) == SET)
7442     return set_live_p (PATTERN (insn), insn, counts);
7443   else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7444     {
7445       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7446 	{
7447 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7448 
7449 	  if (GET_CODE (elt) == SET)
7450 	    {
7451 	      if (set_live_p (elt, insn, counts))
7452 		return true;
7453 	    }
7454 	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7455 	    return true;
7456 	}
7457       return false;
7458     }
7459   else
7460     return true;
7461 }
7462 
7463 /* Return true if libcall is dead as a whole.  */
7464 
7465 static bool
dead_libcall_p(rtx insn,int * counts)7466 dead_libcall_p (rtx insn, int *counts)
7467 {
7468   rtx note, set, new;
7469 
7470   /* See if there's a REG_EQUAL note on this insn and try to
7471      replace the source with the REG_EQUAL expression.
7472 
7473      We assume that insns with REG_RETVALs can only be reg->reg
7474      copies at this point.  */
7475   note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7476   if (!note)
7477     return false;
7478 
7479   set = single_set (insn);
7480   if (!set)
7481     return false;
7482 
7483   new = simplify_rtx (XEXP (note, 0));
7484   if (!new)
7485     new = XEXP (note, 0);
7486 
7487   /* While changing insn, we must update the counts accordingly.  */
7488   count_reg_usage (insn, counts, NULL_RTX, -1);
7489 
7490   if (validate_change (insn, &SET_SRC (set), new, 0))
7491     {
7492       count_reg_usage (insn, counts, NULL_RTX, 1);
7493       remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7494       remove_note (insn, note);
7495       return true;
7496     }
7497 
7498   if (CONSTANT_P (new))
7499     {
7500       new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7501       if (new && validate_change (insn, &SET_SRC (set), new, 0))
7502 	{
7503 	  count_reg_usage (insn, counts, NULL_RTX, 1);
7504 	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7505 	  remove_note (insn, note);
7506 	  return true;
7507 	}
7508     }
7509 
7510   count_reg_usage (insn, counts, NULL_RTX, 1);
7511   return false;
7512 }
7513 
7514 /* Scan all the insns and delete any that are dead; i.e., they store a register
7515    that is never used or they copy a register to itself.
7516 
7517    This is used to remove insns made obviously dead by cse, loop or other
7518    optimizations.  It improves the heuristics in loop since it won't try to
7519    move dead invariants out of loops or make givs for dead quantities.  The
7520    remaining passes of the compilation are also sped up.  */
7521 
7522 int
delete_trivially_dead_insns(rtx insns,int nreg)7523 delete_trivially_dead_insns (rtx insns, int nreg)
7524 {
7525   int *counts;
7526   rtx insn, prev;
7527   int in_libcall = 0, dead_libcall = 0;
7528   int ndead = 0;
7529 
7530   timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7531   /* First count the number of times each register is used.  */
7532   counts = XCNEWVEC (int, nreg);
7533   for (insn = insns; insn; insn = NEXT_INSN (insn))
7534     if (INSN_P (insn))
7535       count_reg_usage (insn, counts, NULL_RTX, 1);
7536 
7537   /* Go from the last insn to the first and delete insns that only set unused
7538      registers or copy a register to itself.  As we delete an insn, remove
7539      usage counts for registers it uses.
7540 
7541      The first jump optimization pass may leave a real insn as the last
7542      insn in the function.   We must not skip that insn or we may end
7543      up deleting code that is not really dead.  */
7544   for (insn = get_last_insn (); insn; insn = prev)
7545     {
7546       int live_insn = 0;
7547 
7548       prev = PREV_INSN (insn);
7549       if (!INSN_P (insn))
7550 	continue;
7551 
7552       /* Don't delete any insns that are part of a libcall block unless
7553 	 we can delete the whole libcall block.
7554 
7555 	 Flow or loop might get confused if we did that.  Remember
7556 	 that we are scanning backwards.  */
7557       if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7558 	{
7559 	  in_libcall = 1;
7560 	  live_insn = 1;
7561 	  dead_libcall = dead_libcall_p (insn, counts);
7562 	}
7563       else if (in_libcall)
7564 	live_insn = ! dead_libcall;
7565       else
7566 	live_insn = insn_live_p (insn, counts);
7567 
7568       /* If this is a dead insn, delete it and show registers in it aren't
7569 	 being used.  */
7570 
7571       if (! live_insn)
7572 	{
7573 	  count_reg_usage (insn, counts, NULL_RTX, -1);
7574 	  delete_insn_and_edges (insn);
7575 	  ndead++;
7576 	}
7577 
7578       if (in_libcall && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7579 	{
7580 	  in_libcall = 0;
7581 	  dead_libcall = 0;
7582 	}
7583     }
7584 
7585   if (dump_file && ndead)
7586     fprintf (dump_file, "Deleted %i trivially dead insns\n",
7587 	     ndead);
7588   /* Clean up.  */
7589   free (counts);
7590   timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7591   return ndead;
7592 }
7593 
7594 /* This function is called via for_each_rtx.  The argument, NEWREG, is
7595    a condition code register with the desired mode.  If we are looking
7596    at the same register in a different mode, replace it with
7597    NEWREG.  */
7598 
7599 static int
cse_change_cc_mode(rtx * loc,void * data)7600 cse_change_cc_mode (rtx *loc, void *data)
7601 {
7602   struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7603 
7604   if (*loc
7605       && REG_P (*loc)
7606       && REGNO (*loc) == REGNO (args->newreg)
7607       && GET_MODE (*loc) != GET_MODE (args->newreg))
7608     {
7609       validate_change (args->insn, loc, args->newreg, 1);
7610 
7611       return -1;
7612     }
7613   return 0;
7614 }
7615 
7616 /* Change the mode of any reference to the register REGNO (NEWREG) to
7617    GET_MODE (NEWREG) in INSN.  */
7618 
7619 static void
cse_change_cc_mode_insn(rtx insn,rtx newreg)7620 cse_change_cc_mode_insn (rtx insn, rtx newreg)
7621 {
7622   struct change_cc_mode_args args;
7623   int success;
7624 
7625   if (!INSN_P (insn))
7626     return;
7627 
7628   args.insn = insn;
7629   args.newreg = newreg;
7630 
7631   for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7632   for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7633 
7634   /* If the following assertion was triggered, there is most probably
7635      something wrong with the cc_modes_compatible back end function.
7636      CC modes only can be considered compatible if the insn - with the mode
7637      replaced by any of the compatible modes - can still be recognized.  */
7638   success = apply_change_group ();
7639   gcc_assert (success);
7640 }
7641 
7642 /* Change the mode of any reference to the register REGNO (NEWREG) to
7643    GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7644    any instruction which modifies NEWREG.  */
7645 
7646 static void
cse_change_cc_mode_insns(rtx start,rtx end,rtx newreg)7647 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7648 {
7649   rtx insn;
7650 
7651   for (insn = start; insn != end; insn = NEXT_INSN (insn))
7652     {
7653       if (! INSN_P (insn))
7654 	continue;
7655 
7656       if (reg_set_p (newreg, insn))
7657 	return;
7658 
7659       cse_change_cc_mode_insn (insn, newreg);
7660     }
7661 }
7662 
7663 /* BB is a basic block which finishes with CC_REG as a condition code
7664    register which is set to CC_SRC.  Look through the successors of BB
7665    to find blocks which have a single predecessor (i.e., this one),
7666    and look through those blocks for an assignment to CC_REG which is
7667    equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7668    permitted to change the mode of CC_SRC to a compatible mode.  This
7669    returns VOIDmode if no equivalent assignments were found.
7670    Otherwise it returns the mode which CC_SRC should wind up with.
7671 
7672    The main complexity in this function is handling the mode issues.
7673    We may have more than one duplicate which we can eliminate, and we
7674    try to find a mode which will work for multiple duplicates.  */
7675 
7676 static enum machine_mode
cse_cc_succs(basic_block bb,rtx cc_reg,rtx cc_src,bool can_change_mode)7677 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7678 {
7679   bool found_equiv;
7680   enum machine_mode mode;
7681   unsigned int insn_count;
7682   edge e;
7683   rtx insns[2];
7684   enum machine_mode modes[2];
7685   rtx last_insns[2];
7686   unsigned int i;
7687   rtx newreg;
7688   edge_iterator ei;
7689 
7690   /* We expect to have two successors.  Look at both before picking
7691      the final mode for the comparison.  If we have more successors
7692      (i.e., some sort of table jump, although that seems unlikely),
7693      then we require all beyond the first two to use the same
7694      mode.  */
7695 
7696   found_equiv = false;
7697   mode = GET_MODE (cc_src);
7698   insn_count = 0;
7699   FOR_EACH_EDGE (e, ei, bb->succs)
7700     {
7701       rtx insn;
7702       rtx end;
7703 
7704       if (e->flags & EDGE_COMPLEX)
7705 	continue;
7706 
7707       if (EDGE_COUNT (e->dest->preds) != 1
7708 	  || e->dest == EXIT_BLOCK_PTR)
7709 	continue;
7710 
7711       end = NEXT_INSN (BB_END (e->dest));
7712       for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7713 	{
7714 	  rtx set;
7715 
7716 	  if (! INSN_P (insn))
7717 	    continue;
7718 
7719 	  /* If CC_SRC is modified, we have to stop looking for
7720 	     something which uses it.  */
7721 	  if (modified_in_p (cc_src, insn))
7722 	    break;
7723 
7724 	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7725 	  set = single_set (insn);
7726 	  if (set
7727 	      && REG_P (SET_DEST (set))
7728 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7729 	    {
7730 	      bool found;
7731 	      enum machine_mode set_mode;
7732 	      enum machine_mode comp_mode;
7733 
7734 	      found = false;
7735 	      set_mode = GET_MODE (SET_SRC (set));
7736 	      comp_mode = set_mode;
7737 	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7738 		found = true;
7739 	      else if (GET_CODE (cc_src) == COMPARE
7740 		       && GET_CODE (SET_SRC (set)) == COMPARE
7741 		       && mode != set_mode
7742 		       && rtx_equal_p (XEXP (cc_src, 0),
7743 				       XEXP (SET_SRC (set), 0))
7744 		       && rtx_equal_p (XEXP (cc_src, 1),
7745 				       XEXP (SET_SRC (set), 1)))
7746 
7747 		{
7748 		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7749 		  if (comp_mode != VOIDmode
7750 		      && (can_change_mode || comp_mode == mode))
7751 		    found = true;
7752 		}
7753 
7754 	      if (found)
7755 		{
7756 		  found_equiv = true;
7757 		  if (insn_count < ARRAY_SIZE (insns))
7758 		    {
7759 		      insns[insn_count] = insn;
7760 		      modes[insn_count] = set_mode;
7761 		      last_insns[insn_count] = end;
7762 		      ++insn_count;
7763 
7764 		      if (mode != comp_mode)
7765 			{
7766 			  gcc_assert (can_change_mode);
7767 			  mode = comp_mode;
7768 
7769 			  /* The modified insn will be re-recognized later.  */
7770 			  PUT_MODE (cc_src, mode);
7771 			}
7772 		    }
7773 		  else
7774 		    {
7775 		      if (set_mode != mode)
7776 			{
7777 			  /* We found a matching expression in the
7778 			     wrong mode, but we don't have room to
7779 			     store it in the array.  Punt.  This case
7780 			     should be rare.  */
7781 			  break;
7782 			}
7783 		      /* INSN sets CC_REG to a value equal to CC_SRC
7784 			 with the right mode.  We can simply delete
7785 			 it.  */
7786 		      delete_insn (insn);
7787 		    }
7788 
7789 		  /* We found an instruction to delete.  Keep looking,
7790 		     in the hopes of finding a three-way jump.  */
7791 		  continue;
7792 		}
7793 
7794 	      /* We found an instruction which sets the condition
7795 		 code, so don't look any farther.  */
7796 	      break;
7797 	    }
7798 
7799 	  /* If INSN sets CC_REG in some other way, don't look any
7800 	     farther.  */
7801 	  if (reg_set_p (cc_reg, insn))
7802 	    break;
7803 	}
7804 
7805       /* If we fell off the bottom of the block, we can keep looking
7806 	 through successors.  We pass CAN_CHANGE_MODE as false because
7807 	 we aren't prepared to handle compatibility between the
7808 	 further blocks and this block.  */
7809       if (insn == end)
7810 	{
7811 	  enum machine_mode submode;
7812 
7813 	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7814 	  if (submode != VOIDmode)
7815 	    {
7816 	      gcc_assert (submode == mode);
7817 	      found_equiv = true;
7818 	      can_change_mode = false;
7819 	    }
7820 	}
7821     }
7822 
7823   if (! found_equiv)
7824     return VOIDmode;
7825 
7826   /* Now INSN_COUNT is the number of instructions we found which set
7827      CC_REG to a value equivalent to CC_SRC.  The instructions are in
7828      INSNS.  The modes used by those instructions are in MODES.  */
7829 
7830   newreg = NULL_RTX;
7831   for (i = 0; i < insn_count; ++i)
7832     {
7833       if (modes[i] != mode)
7834 	{
7835 	  /* We need to change the mode of CC_REG in INSNS[i] and
7836 	     subsequent instructions.  */
7837 	  if (! newreg)
7838 	    {
7839 	      if (GET_MODE (cc_reg) == mode)
7840 		newreg = cc_reg;
7841 	      else
7842 		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7843 	    }
7844 	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7845 				    newreg);
7846 	}
7847 
7848       delete_insn (insns[i]);
7849     }
7850 
7851   return mode;
7852 }
7853 
7854 /* If we have a fixed condition code register (or two), walk through
7855    the instructions and try to eliminate duplicate assignments.  */
7856 
7857 static void
cse_condition_code_reg(void)7858 cse_condition_code_reg (void)
7859 {
7860   unsigned int cc_regno_1;
7861   unsigned int cc_regno_2;
7862   rtx cc_reg_1;
7863   rtx cc_reg_2;
7864   basic_block bb;
7865 
7866   if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7867     return;
7868 
7869   cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7870   if (cc_regno_2 != INVALID_REGNUM)
7871     cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7872   else
7873     cc_reg_2 = NULL_RTX;
7874 
7875   FOR_EACH_BB (bb)
7876     {
7877       rtx last_insn;
7878       rtx cc_reg;
7879       rtx insn;
7880       rtx cc_src_insn;
7881       rtx cc_src;
7882       enum machine_mode mode;
7883       enum machine_mode orig_mode;
7884 
7885       /* Look for blocks which end with a conditional jump based on a
7886 	 condition code register.  Then look for the instruction which
7887 	 sets the condition code register.  Then look through the
7888 	 successor blocks for instructions which set the condition
7889 	 code register to the same value.  There are other possible
7890 	 uses of the condition code register, but these are by far the
7891 	 most common and the ones which we are most likely to be able
7892 	 to optimize.  */
7893 
7894       last_insn = BB_END (bb);
7895       if (!JUMP_P (last_insn))
7896 	continue;
7897 
7898       if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7899 	cc_reg = cc_reg_1;
7900       else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7901 	cc_reg = cc_reg_2;
7902       else
7903 	continue;
7904 
7905       cc_src_insn = NULL_RTX;
7906       cc_src = NULL_RTX;
7907       for (insn = PREV_INSN (last_insn);
7908 	   insn && insn != PREV_INSN (BB_HEAD (bb));
7909 	   insn = PREV_INSN (insn))
7910 	{
7911 	  rtx set;
7912 
7913 	  if (! INSN_P (insn))
7914 	    continue;
7915 	  set = single_set (insn);
7916 	  if (set
7917 	      && REG_P (SET_DEST (set))
7918 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7919 	    {
7920 	      cc_src_insn = insn;
7921 	      cc_src = SET_SRC (set);
7922 	      break;
7923 	    }
7924 	  else if (reg_set_p (cc_reg, insn))
7925 	    break;
7926 	}
7927 
7928       if (! cc_src_insn)
7929 	continue;
7930 
7931       if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7932 	continue;
7933 
7934       /* Now CC_REG is a condition code register used for a
7935 	 conditional jump at the end of the block, and CC_SRC, in
7936 	 CC_SRC_INSN, is the value to which that condition code
7937 	 register is set, and CC_SRC is still meaningful at the end of
7938 	 the basic block.  */
7939 
7940       orig_mode = GET_MODE (cc_src);
7941       mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7942       if (mode != VOIDmode)
7943 	{
7944 	  gcc_assert (mode == GET_MODE (cc_src));
7945 	  if (mode != orig_mode)
7946 	    {
7947 	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7948 
7949 	      cse_change_cc_mode_insn (cc_src_insn, newreg);
7950 
7951 	      /* Do the same in the following insns that use the
7952 		 current value of CC_REG within BB.  */
7953 	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7954 					NEXT_INSN (last_insn),
7955 					newreg);
7956 	    }
7957 	}
7958     }
7959 }
7960 
7961 
7962 /* Perform common subexpression elimination.  Nonzero value from
7963    `cse_main' means that jumps were simplified and some code may now
7964    be unreachable, so do jump optimization again.  */
7965 static bool
gate_handle_cse(void)7966 gate_handle_cse (void)
7967 {
7968   return optimize > 0;
7969 }
7970 
7971 static unsigned int
rest_of_handle_cse(void)7972 rest_of_handle_cse (void)
7973 {
7974   int tem;
7975 
7976   if (dump_file)
7977     dump_flow_info (dump_file, dump_flags);
7978 
7979   reg_scan (get_insns (), max_reg_num ());
7980 
7981   tem = cse_main (get_insns (), max_reg_num ());
7982   if (tem)
7983     rebuild_jump_labels (get_insns ());
7984   if (purge_all_dead_edges ())
7985     delete_unreachable_blocks ();
7986 
7987   delete_trivially_dead_insns (get_insns (), max_reg_num ());
7988 
7989   /* If we are not running more CSE passes, then we are no longer
7990      expecting CSE to be run.  But always rerun it in a cheap mode.  */
7991   cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7992 
7993   if (tem)
7994     delete_dead_jumptables ();
7995 
7996   if (tem || optimize > 1)
7997     cleanup_cfg (CLEANUP_EXPENSIVE);
7998   return 0;
7999 }
8000 
8001 struct tree_opt_pass pass_cse =
8002 {
8003   "cse1",                               /* name */
8004   gate_handle_cse,                      /* gate */
8005   rest_of_handle_cse,			/* execute */
8006   NULL,                                 /* sub */
8007   NULL,                                 /* next */
8008   0,                                    /* static_pass_number */
8009   TV_CSE,                               /* tv_id */
8010   0,                                    /* properties_required */
8011   0,                                    /* properties_provided */
8012   0,                                    /* properties_destroyed */
8013   0,                                    /* todo_flags_start */
8014   TODO_dump_func |
8015   TODO_ggc_collect,                     /* todo_flags_finish */
8016   's'                                   /* letter */
8017 };
8018 
8019 
8020 static bool
gate_handle_cse2(void)8021 gate_handle_cse2 (void)
8022 {
8023   return optimize > 0 && flag_rerun_cse_after_loop;
8024 }
8025 
8026 /* Run second CSE pass after loop optimizations.  */
8027 static unsigned int
rest_of_handle_cse2(void)8028 rest_of_handle_cse2 (void)
8029 {
8030   int tem;
8031 
8032   if (dump_file)
8033     dump_flow_info (dump_file, dump_flags);
8034 
8035   tem = cse_main (get_insns (), max_reg_num ());
8036 
8037   /* Run a pass to eliminate duplicated assignments to condition code
8038      registers.  We have to run this after bypass_jumps, because it
8039      makes it harder for that pass to determine whether a jump can be
8040      bypassed safely.  */
8041   cse_condition_code_reg ();
8042 
8043   purge_all_dead_edges ();
8044   delete_trivially_dead_insns (get_insns (), max_reg_num ());
8045 
8046   if (tem)
8047     {
8048       timevar_push (TV_JUMP);
8049       rebuild_jump_labels (get_insns ());
8050       delete_dead_jumptables ();
8051       cleanup_cfg (CLEANUP_EXPENSIVE);
8052       timevar_pop (TV_JUMP);
8053     }
8054   reg_scan (get_insns (), max_reg_num ());
8055   cse_not_expected = 1;
8056   return 0;
8057 }
8058 
8059 
8060 struct tree_opt_pass pass_cse2 =
8061 {
8062   "cse2",                               /* name */
8063   gate_handle_cse2,                     /* gate */
8064   rest_of_handle_cse2,			/* execute */
8065   NULL,                                 /* sub */
8066   NULL,                                 /* next */
8067   0,                                    /* static_pass_number */
8068   TV_CSE2,                              /* tv_id */
8069   0,                                    /* properties_required */
8070   0,                                    /* properties_provided */
8071   0,                                    /* properties_destroyed */
8072   0,                                    /* todo_flags_start */
8073   TODO_dump_func |
8074   TODO_ggc_collect,                     /* todo_flags_finish */
8075   't'                                   /* letter */
8076 };
8077 
8078