/* Inline functions for tree-flow.h
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <[email protected]>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef _TREE_FLOW_INLINE_H
#define _TREE_FLOW_INLINE_H 1

/* Inline functions for manipulating various data structures defined in
   tree-flow.h.  See tree-flow.h for documentation.  */
/* Return true when gimple SSA form was built.
   gimple_in_ssa_p is queried by the gimplifier in various early stages
   before the SSA infrastructure is initialized, so check for the presence
   of the data structures first.  */
static inline bool
gimple_in_ssa_p (const struct function *fun)
{
  return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
}

/* Artificial variable used for the virtual operand FUD chain.  */
static inline tree
gimple_vop (const struct function *fun)
{
  gcc_checking_assert (fun && fun->gimple_df);
  return fun->gimple_df->vop;
}

/* Initialize the hashtable iterator HTI to point to hashtable TABLE.
   Return the first live element, or NULL if the table is empty.  */

static inline void *
first_htab_element (htab_iterator *hti, htab_t table)
{
  hti->htab = table;
  hti->slot = table->entries;
  hti->limit = hti->slot + htab_size (table);
  do
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
	break;
    } while (++(hti->slot) < hti->limit);

  if (hti->slot < hti->limit)
    return *(hti->slot);
  return NULL;
}

/* Return true if the hashtable iterator HTI has reached the end of the
   table, false otherwise.  */

static inline bool
end_htab_p (const htab_iterator *hti)
{
  if (hti->slot >= hti->limit)
    return true;
  return false;
}

/* Advance the hashtable iterator pointed to by HTI to the next element of
   the hashtable, returning it, or NULL if no elements remain.  */

static inline void *
next_htab_element (htab_iterator *hti)
{
  while (++(hti->slot) < hti->limit)
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
	return x;
    }
  return NULL;
}
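
/* Taken together, these three routines support a simple scan over every
   live element of a hashtable.  A minimal usage sketch (process_elem is a
   hypothetical callback, not part of this file):

     htab_iterator hti;
     void *elem;
     for (elem = first_htab_element (&hti, table);
          !end_htab_p (&hti);
          elem = next_htab_element (&hti))
       process_elem (elem);
*/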

/* Get the number of the next statement uid to be allocated.  */
static inline unsigned int
gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid;
}

/* Set the number of the next statement uid to be allocated.  */
static inline void
set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
{
  fn->last_stmt_uid = maxid;
}

/* Return the next statement uid to be allocated, and bump the counter.  */
static inline unsigned int
inc_gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid++;
}

/* Return the line number for STMT, or -1 if we have no line
   number information for it.  */
static inline int
get_lineno (const_gimple stmt)
{
  location_t loc;

  if (!stmt)
    return -1;

  loc = gimple_location (stmt);
  if (loc == UNKNOWN_LOCATION)
    return -1;

  return LOCATION_LINE (loc);
}

/* Delink an immediate_uses node from its chain.  */
static inline void
delink_imm_use (ssa_use_operand_t *linknode)
{
  /* Return if this node is not in a list.  */
  if (linknode->prev == NULL)
    return;

  linknode->prev->next = linknode->next;
  linknode->next->prev = linknode->prev;
  linknode->prev = NULL;
  linknode->next = NULL;
}

/* Link ssa_imm_use node LINKNODE into the chain for LIST.  */
static inline void
link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
{
  /* Link the new node at the head of the list.  If we are in the process of
     traversing the list, we won't visit any new nodes added to it.  */
  linknode->prev = list;
  linknode->next = list->next;
  list->next->prev = linknode;
  list->next = linknode;
}
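
/* The immediate-use chain of an SSA name is a circular doubly-linked list
   rooted at SSA_NAME_IMM_USE_NODE (name); an empty chain is one where the
   root node points back to itself.  A sketch of the invariant the link and
   delink routines above maintain:

     root <-> use1 <-> use2 <-> ... <-> root
*/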

/* Link ssa_imm_use node LINKNODE into the chain for DEF.  */
static inline void
link_imm_use (ssa_use_operand_t *linknode, tree def)
{
  ssa_use_operand_t *root;

  if (!def || TREE_CODE (def) != SSA_NAME)
    linknode->prev = NULL;
  else
    {
      root = &(SSA_NAME_IMM_USE_NODE (def));
      if (linknode->use)
        gcc_checking_assert (*(linknode->use) == def);
      link_imm_use_to_list (linknode, root);
    }
}

/* Set the value of a use pointed to by USE to VAL.  */
static inline void
set_ssa_use_from_ptr (use_operand_p use, tree val)
{
  delink_imm_use (use);
  *(use->use) = val;
  link_imm_use (use, val);
}

/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
   in STMT.  */
static inline void
link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
{
  if (stmt)
    link_imm_use (linknode, def);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}

/* Relink a new node in place of an old node in the list.  */
static inline void
relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
{
  /* The new node had better be in the same list as the old one.  */
  gcc_checking_assert (*(old->use) == *(node->use));
  node->prev = old->prev;
  node->next = old->next;
  if (old->prev)
    {
      old->prev->next = node;
      old->next->prev = node;
      /* Remove the old node from the list.  */
      old->prev = NULL;
    }
}

/* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
   in STMT.  */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
		     gimple stmt)
{
  if (stmt)
    relink_imm_use (linknode, old);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}


/* Return true if IMM has reached the end of the immediate use list.  */
static inline bool
end_readonly_imm_use_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Initialize iterator IMM to process the list for VAR.  */
static inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
#ifdef ENABLE_CHECKING
  imm->iter_node.next = imm->imm_use->next;
#endif
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}

/* Bump IMM to the next use in the list.  */
static inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

#ifdef ENABLE_CHECKING
  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  gcc_assert (imm->iter_node.next == old->next);
  imm->iter_node.next = old->next->next;
#endif

  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
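
/* A minimal sketch of the read-only traversal these two routines support;
   the loop must not modify the use list while iterating.  This is roughly
   what the FOR_EACH_IMM_USE_FAST macro expands to:

     imm_use_iterator iter;
     use_operand_p use_p;
     for (use_p = first_readonly_imm_use (&iter, ssa_name);
          !end_readonly_imm_use_p (&iter);
          use_p = next_readonly_imm_use (&iter))
       ...
*/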

/* tree-cfg.c */
extern bool has_zero_uses_1 (const ssa_use_operand_t *head);
extern bool single_imm_use_1 (const ssa_use_operand_t *head,
			      use_operand_p *use_p, gimple *stmt);

/* Return true if VAR has no nondebug uses.  */
static inline bool
has_zero_uses (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* A single use_operand means there are no items in the list.  */
  if (ptr == ptr->next)
    return true;

  /* If there are debug stmts, we have to look at each use and see
     whether there are any nondebug uses.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    return false;

  return has_zero_uses_1 (ptr);
}

/* Return true if VAR has a single nondebug use.  */
static inline bool
has_single_use (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    return false;

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    return !is_gimple_debug (USE_STMT (ptr->next));

  /* If there are debug stmts, we have to look at each of them.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    return false;

  return single_imm_use_1 (ptr, NULL, NULL);
}


/* If VAR has only a single immediate nondebug use, return true, and
   set USE_P and STMT to the use pointer and stmt of occurrence.  */
static inline bool
single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    {
    return_false:
      *use_p = NULL_USE_OPERAND_P;
      *stmt = NULL;
      return false;
    }

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    {
      if (!is_gimple_debug (USE_STMT (ptr->next)))
	{
	  *use_p = ptr->next;
	  *stmt = ptr->next->loc.stmt;
	  return true;
	}
      else
	goto return_false;
    }

  /* If there are debug stmts, we have to look at each of them.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    goto return_false;

  return single_imm_use_1 (ptr, use_p, stmt);
}
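
/* A usage sketch: if VAR has exactly one nondebug use, rewrite that use in
   place ('replacement' is a hypothetical tree; SET_USE is assumed from the
   SSA operand infrastructure and boils down to set_ssa_use_from_ptr):

     use_operand_p use_p;
     gimple use_stmt;
     if (single_imm_use (var, &use_p, &use_stmt))
       SET_USE (use_p, replacement);
*/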

/* Return the number of nondebug immediate uses of VAR.  */
static inline unsigned int
num_imm_uses (const_tree var)
{
  const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
  const ssa_use_operand_t *ptr;
  unsigned int num = 0;

  if (!MAY_HAVE_DEBUG_STMTS)
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      num++;
  else
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      if (!is_gimple_debug (USE_STMT (ptr)))
	num++;

  return num;
}

/* Return the tree pointed-to by USE.  */
static inline tree
get_use_from_ptr (use_operand_p use)
{
  return *(use->use);
}

/* Return the tree pointed-to by DEF.  */
static inline tree
get_def_from_ptr (def_operand_p def)
{
  return *def;
}

/* Return a use_operand_p pointer for argument I of PHI node GS.  */

static inline use_operand_p
gimple_phi_arg_imm_use_ptr (gimple gs, int i)
{
  return &gimple_phi_arg (gs, i)->imm_use;
}

/* Return the tree operand for argument INDEX of PHI node GS.  */

static inline tree
gimple_phi_arg_def (gimple gs, size_t index)
{
  struct phi_arg_d *pd = gimple_phi_arg (gs, index);
  return get_use_from_ptr (&pd->imm_use);
}

/* Return a pointer to the tree operand for argument INDEX of PHI node GS.  */

static inline tree *
gimple_phi_arg_def_ptr (gimple gs, size_t index)
{
  return &gimple_phi_arg (gs, index)->def;
}

/* Return the edge associated with argument I of phi node GS.  */

static inline edge
gimple_phi_arg_edge (gimple gs, size_t i)
{
  return EDGE_PRED (gimple_bb (gs), i);
}

/* Return the source location of gimple argument I of phi node GS.  */

static inline source_location
gimple_phi_arg_location (gimple gs, size_t i)
{
  return gimple_phi_arg (gs, i)->locus;
}

/* Return the source location of the argument on edge E of phi node GS.  */

static inline source_location
gimple_phi_arg_location_from_edge (gimple gs, edge e)
{
  return gimple_phi_arg (gs, e->dest_idx)->locus;
}

/* Set the source location of gimple argument I of phi node GS to LOC.  */

static inline void
gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
{
  gimple_phi_arg (gs, i)->locus = loc;
}

/* Return TRUE if argument I of phi node GS has a location record.  */

static inline bool
gimple_phi_arg_has_location (gimple gs, size_t i)
{
  return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
}


/* Return the PHI nodes for basic block BB, or NULL if there are no
   PHI nodes.  */
static inline gimple_seq
phi_nodes (const_basic_block bb)
{
  gcc_checking_assert (!(bb->flags & BB_RTL));
  return bb->il.gimple.phi_nodes;
}

/* Return a pointer to the PHI node sequence of basic block BB.  */
static inline gimple_seq *
phi_nodes_ptr (basic_block bb)
{
  gcc_checking_assert (!(bb->flags & BB_RTL));
  return &bb->il.gimple.phi_nodes;
}

/* Set PHI nodes of a basic block BB to SEQ.  */

static inline void
set_phi_nodes (basic_block bb, gimple_seq seq)
{
  gimple_stmt_iterator i;

  gcc_checking_assert (!(bb->flags & BB_RTL));
  bb->il.gimple.phi_nodes = seq;
  if (seq)
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      gimple_set_bb (gsi_stmt (i), bb);
}

/* Return the index of the PHI argument containing the use USE.  */

static inline int
phi_arg_index_from_use (use_operand_p use)
{
  struct phi_arg_d *element, *root;
  size_t index;
  gimple phi;

  /* Since the use is the first thing in a PHI argument element, we can
     calculate its index based on casting it to an argument, and performing
     pointer arithmetic.  */

  phi = USE_STMT (use);

  element = (struct phi_arg_d *)use;
  root = gimple_phi_arg (phi, 0);
  index = element - root;

  /* Make sure the calculation doesn't have any leftover bytes.  If it does,
     then imm_use is likely not the first element in phi_arg_d.  */
  gcc_checking_assert ((((char *)element - (char *)root)
			% sizeof (struct phi_arg_d)) == 0
		       && index < gimple_phi_capacity (phi));

  return index;
}

/* Return true if T (assumed to be a DECL) is a global variable.
   A variable is considered global if its storage is not automatic.  */

static inline bool
is_global_var (const_tree t)
{
  return (TREE_STATIC (t) || DECL_EXTERNAL (t));
}


/* Return true if VAR may be aliased.  A variable is considered as
   maybe aliased if it has its address taken by the local TU
   or possibly by another TU and might be modified through a pointer.  */

static inline bool
may_be_aliased (const_tree var)
{
  return (TREE_CODE (var) != CONST_DECL
	  && !((TREE_STATIC (var) || TREE_PUBLIC (var) || DECL_EXTERNAL (var))
	       && TREE_READONLY (var)
	       && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (var)))
	  && (TREE_PUBLIC (var)
	      || DECL_EXTERNAL (var)
	      || TREE_ADDRESSABLE (var)));
}
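
/* A few illustrative cases (a sketch, not exhaustive):

     static const int c = 42;  -- read-only static, so NOT may_be_aliased
     int g;                    -- TREE_PUBLIC, so may_be_aliased
     int l; int *p = &l;       -- TREE_ADDRESSABLE local, so may_be_aliased
*/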


/* PHI nodes should contain only ssa_names and invariants.  A test
   for ssa_name is definitely simpler; don't let invalid contents
   slip in in the meantime.  */

static inline bool
phi_ssa_name_p (const_tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return true;
  gcc_checking_assert (is_gimple_min_invariant (t));
  return false;
}


/* Returns the loop of the statement STMT.  */

static inline struct loop *
loop_containing_stmt (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  if (!bb)
    return NULL;

  return bb->loop_father;
}


/*  -----------------------------------------------------------------------  */

/* The following set of routines is used to iterate over the various types
   of SSA operands.  */

/* Return true if PTR is finished iterating.  */
static inline bool
op_iter_done (const ssa_op_iter *ptr)
{
  return ptr->done;
}

/* Get the next iterator use value for PTR.  */
static inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  if (ptr->i < ptr->numops)
    {
      return PHI_ARG_DEF_PTR (ptr->stmt, (ptr->i)++);
    }
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}

/* Get the next iterator def value for PTR.  */
static inline def_operand_p
op_iter_next_def (ssa_op_iter *ptr)
{
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
  if (ptr->flags & SSA_OP_VDEF)
    {
      tree *p;
      ptr->flags &= ~SSA_OP_VDEF;
      p = gimple_vdef_ptr (ptr->stmt);
      if (p && *p)
	return p;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
	{
	  tree *val = gimple_op_ptr (ptr->stmt, ptr->i);
	  ptr->i++;
	  if (*val)
	    {
	      if (TREE_CODE (*val) == TREE_LIST)
		val = &TREE_VALUE (*val);
	      if (TREE_CODE (*val) == SSA_NAME
		  || is_gimple_reg (*val))
		return val;
	    }
	}
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_DEF_OPERAND_P;
}

/* Get the next iterator tree value for PTR.  */
static inline tree
op_iter_next_tree (ssa_op_iter *ptr)
{
  tree val;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
  if (ptr->uses)
    {
      val = USE_OP (ptr->uses);
      ptr->uses = ptr->uses->next;
      return val;
    }
  if (ptr->flags & SSA_OP_VDEF)
    {
      ptr->flags &= ~SSA_OP_VDEF;
      if ((val = gimple_vdef (ptr->stmt)))
	return val;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
	{
	  val = gimple_op (ptr->stmt, ptr->i);
	  ptr->i++;
	  if (val)
	    {
	      if (TREE_CODE (val) == TREE_LIST)
		val = TREE_VALUE (val);
	      if (TREE_CODE (val) == SSA_NAME
		  || is_gimple_reg (val))
		return val;
	    }
	}
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_TREE;
}


/* Clear the iterator PTR and mark it done.  This is normally used to avoid
   compiler warnings about possibly uninitialized components.  */

static inline void
clear_and_done_ssa_iter (ssa_op_iter *ptr)
{
  ptr->i = 0;
  ptr->numops = 0;
  ptr->uses = NULL;
  ptr->iter_type = ssa_op_iter_none;
  ptr->stmt = NULL;
  ptr->done = true;
  ptr->flags = 0;
}

/* Initialize the iterator PTR to the operands in STMT selected by FLAGS.  */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
{
  /* PHI nodes require a different iterator initialization path.  We
     do not support iterating over virtual defs or uses without
     iterating over defs or uses at the same time.  */
  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI
		       && (!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
		       && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
  ptr->numops = 0;
  if (flags & (SSA_OP_DEF | SSA_OP_VDEF))
    {
      switch (gimple_code (stmt))
	{
	  case GIMPLE_ASSIGN:
	  case GIMPLE_CALL:
	    ptr->numops = 1;
	    break;
	  case GIMPLE_ASM:
	    ptr->numops = gimple_asm_noutputs (stmt);
	    break;
	  default:
	    ptr->numops = 0;
	    flags &= ~(SSA_OP_DEF | SSA_OP_VDEF);
	    break;
	}
    }
  ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
  if (!(flags & SSA_OP_VUSE)
      && ptr->uses
      && gimple_vuse (stmt) != NULL_TREE)
    ptr->uses = ptr->uses->next;
  ptr->done = false;
  ptr->i = 0;

  ptr->stmt = stmt;
  ptr->flags = flags;
}

/* Initialize iterator PTR to the use operands in STMT based on FLAGS.  Return
   the first use.  */
static inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
		       && (flags & SSA_OP_USE));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}

/* Initialize iterator PTR to the def operands in STMT based on FLAGS.  Return
   the first def.  */
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
		       && (flags & SSA_OP_DEF));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_def;
  return op_iter_next_def (ptr);
}

/* Initialize iterator PTR to the operands in STMT based on FLAGS.  Return
   the first operand as a tree.  */
static inline tree
op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
{
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_tree;
  return op_iter_next_tree (ptr);
}
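
/* These init/next/done routines are the building blocks of the
   FOR_EACH_SSA_*_OPERAND iteration macros used throughout the middle end;
   the tree flavor, for instance, expands to roughly:

     #define FOR_EACH_SSA_TREE_OPERAND(TREEVAR, STMT, ITER, FLAGS)   \
       for (TREEVAR = op_iter_init_tree (&(ITER), STMT, FLAGS);      \
            !op_iter_done (&(ITER));                                 \
            TREEVAR = op_iter_next_tree (&(ITER)))
*/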


/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline tree
single_ssa_tree_operand (gimple stmt, int flags)
{
  tree var;
  ssa_op_iter iter;

  var = op_iter_init_tree (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_TREE;
  op_iter_next_tree (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_TREE;
}


/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline use_operand_p
single_ssa_use_operand (gimple stmt, int flags)
{
  use_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_use (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_USE_OPERAND_P;
  op_iter_next_use (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_USE_OPERAND_P;
}


/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline def_operand_p
single_ssa_def_operand (gimple stmt, int flags)
{
  def_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_def (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_DEF_OPERAND_P;
  op_iter_next_def (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_DEF_OPERAND_P;
}


/* Return true if there are zero operands in STMT matching the type
   given in FLAGS.  */
static inline bool
zero_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;

  op_iter_init_tree (&iter, stmt, flags);
  return op_iter_done (&iter);
}


/* Return the number of operands matching FLAGS in STMT.  */
static inline int
num_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;
  tree t;
  int num = 0;

  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI);
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
    num++;
  return num;
}

static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags);

/* Delink all immediate_use information for STMT.  */
static inline void
delink_stmt_imm_use (gimple stmt)
{
  ssa_op_iter iter;
  use_operand_p use_p;

  if (ssa_operands_active (cfun))
    FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_ALL_USES)
      delink_imm_use (use_p);
}


/* If there is a single DEF in the PHI node which matches FLAGS, return it.
   Otherwise return NULL_TREE.  */
static inline tree
single_phi_def (gimple stmt, int flags)
{
  tree def = PHI_RESULT (stmt);
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
    return def;
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
    return def;
  return NULL_TREE;
}

/* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS should
   be either SSA_OP_USES or SSA_OP_VIRTUAL_USES.  */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = gimple_phi_result (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  ptr->stmt = phi;
  ptr->numops = gimple_phi_num_args (phi);
  ptr->iter_type = ssa_op_iter_use;
  ptr->flags = flags;
  return op_iter_next_use (ptr);
}


/* Start an iterator for a PHI definition.  */

static inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = PHI_RESULT (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_DEF_OPERAND_P;
    }

  ptr->iter_type = ssa_op_iter_def;
  /* The first call to op_iter_next_def will terminate the iterator since
     all the fields are NULL.  Simply return the result here as the first and
     therefore only result.  */
  return PHI_RESULT_PTR (phi);
}

/* Return true if IMM has reached the end of the immediate use stmt list.  */

static inline bool
end_imm_use_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Finish the traversal of an immediate use stmt list IMM by removing the
   placeholder node from the list.  */

static inline void
end_imm_use_stmt_traverse (imm_use_iterator *imm)
{
  delink_imm_use (&(imm->iter_node));
}

/* Immediate use traversal of uses within a stmt requires that all the
   uses on a stmt be sequentially listed.  This routine is used to build up
   this sequential list by adding USE_P to the end of the current list
   currently delimited by HEAD and LAST_P.  The new LAST_P value is
   returned.  */

static inline use_operand_p
move_use_after_head (use_operand_p use_p, use_operand_p head,
		     use_operand_p last_p)
{
  gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
  /* Skip head when we find it.  */
  if (use_p != head)
    {
      /* If use_p is already linked in after last_p, continue.  */
      if (last_p->next == use_p)
	last_p = use_p;
      else
	{
	  /* Delink from current location, and link in at last_p.  */
	  delink_imm_use (use_p);
	  link_imm_use_to_list (use_p, last_p);
	  last_p = use_p;
	}
    }
  return last_p;
}


/* This routine will relink all uses with the same stmt as HEAD into the list
   immediately following HEAD for iterator IMM.  */

static inline void
link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
  use_operand_p use_p;
  use_operand_p last_p = head;
  gimple head_stmt = USE_STMT (head);
  tree use = USE_FROM_PTR (head);
  ssa_op_iter op_iter;
  int flag;

  /* Only look at virtual or real uses, depending on the type of HEAD.  */
  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  if (gimple_code (head_stmt) == GIMPLE_PHI)
    {
      FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
	if (USE_FROM_PTR (use_p) == use)
	  last_p = move_use_after_head (use_p, head, last_p);
    }
  else
    {
      if (flag == SSA_OP_USE)
	{
	  FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
	    if (USE_FROM_PTR (use_p) == use)
	      last_p = move_use_after_head (use_p, head, last_p);
	}
      else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
	{
	  if (USE_FROM_PTR (use_p) == use)
	    last_p = move_use_after_head (use_p, head, last_p);
	}
    }
  /* Link iter node in after last_p.  */
  if (imm->iter_node.prev != NULL)
    delink_imm_use (&imm->iter_node);
  link_imm_use_to_list (&(imm->iter_node), last_p);
}

/* Initialize IMM to traverse over uses of VAR.  Return the first
   statement.  */
static inline gimple
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->next_imm_name = NULL_USE_OPERAND_P;

  /* iter_node is used as a marker within the immediate use list to indicate
     where the end of the current stmt's uses are.  Initialize it to NULL
     stmt and use, which indicates a marker node.  */
  imm->iter_node.prev = NULL_USE_OPERAND_P;
  imm->iter_node.next = NULL_USE_OPERAND_P;
  imm->iter_node.loc.stmt = NULL;
  imm->iter_node.use = NULL;

  if (end_imm_use_stmt_p (imm))
    return NULL;

  link_use_stmts_after (imm->imm_use, imm);

  return USE_STMT (imm->imm_use);
}

/* Bump IMM to the next stmt which has a use of var.  */

static inline gimple
next_imm_use_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->iter_node.next;
  if (end_imm_use_stmt_p (imm))
    {
      if (imm->iter_node.prev != NULL)
	delink_imm_use (&imm->iter_node);
      return NULL;
    }

  link_use_stmts_after (imm->imm_use, imm);
  return USE_STMT (imm->imm_use);
}

/* This routine will return the first use on the stmt IMM currently refers
   to.  */

static inline use_operand_p
first_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->next_imm_name = imm->imm_use->next;
  return imm->imm_use;
}

/* Return TRUE if all the uses on the stmt IMM refers to have been visited.  */

static inline bool
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == &(imm->iter_node));
}

/* Bump to the next use on the stmt IMM refers to, return NULL if done.  */

static inline use_operand_p
next_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->next_imm_name;
  if (end_imm_use_on_stmt_p (imm))
    return NULL_USE_OPERAND_P;
  else
    {
      imm->next_imm_name = imm->imm_use->next;
      return imm->imm_use;
    }
}
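
/* A sketch of the two-level traversal these routines support: visit every
   statement that uses an SSA name, and within each statement every use of
   that name.  This is roughly what the FOR_EACH_IMM_USE_STMT and
   FOR_EACH_IMM_USE_ON_STMT macros expand to:

     imm_use_iterator imm;
     gimple stmt;
     use_operand_p use_p;
     for (stmt = first_imm_use_stmt (&imm, var);
          stmt;
          stmt = next_imm_use_stmt (&imm))
       for (use_p = first_imm_use_on_stmt (&imm);
            !end_imm_use_on_stmt_p (&imm);
            use_p = next_imm_use_on_stmt (&imm))
         ...
*/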

/* Return true if VAR cannot be modified by the program.  */

static inline bool
unmodifiable_var_p (const_tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
}

/* Return true if REF, a handled component reference, has an ARRAY_REF
   somewhere in it.  */

static inline bool
ref_contains_array_ref (const_tree ref)
{
  gcc_checking_assert (handled_component_p (ref));

  do {
    if (TREE_CODE (ref) == ARRAY_REF)
      return true;
    ref = TREE_OPERAND (ref, 0);
  } while (handled_component_p (ref));

  return false;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Return true if the two ranges [POS1, POS1 + SIZE1) and
   [POS2, POS2 + SIZE2) overlap.  SIZE1 and/or SIZE2 can be
   (unsigned)-1, in which case the corresponding range is open-ended.
   Otherwise return false.  */

static inline bool
ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
		  unsigned HOST_WIDE_INT size1,
		  unsigned HOST_WIDE_INT pos2,
		  unsigned HOST_WIDE_INT size2)
{
  if (pos1 >= pos2
      && (size2 == (unsigned HOST_WIDE_INT)-1
	  || pos1 < (pos2 + size2)))
    return true;
  if (pos2 >= pos1
      && (size1 == (unsigned HOST_WIDE_INT)-1
	  || pos2 < (pos1 + size1)))
    return true;

  return false;
}
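
/* A worked example: [4, 4 + 8) and [10, 10 + 4) overlap, since
   10 >= 4 and 10 < 4 + 8; but [4, 4 + 8) and [12, 12 + 4) do not,
   since the first range ends exactly where the second begins.  */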

/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}

/* Given an edge_var_map V, return the PHI arg definition.  */

static inline tree
redirect_edge_var_map_def (edge_var_map *v)
{
  return v->def;
}

/* Given an edge_var_map V, return the PHI result.  */

static inline tree
redirect_edge_var_map_result (edge_var_map *v)
{
  return v->result;
}

/* Given an edge_var_map V, return the PHI arg location.  */

static inline source_location
redirect_edge_var_map_location (edge_var_map *v)
{
  return v->locus;
}


/* Return an SSA_NAME node for variable VAR defined in statement STMT
   in function cfun.  */

static inline tree
make_ssa_name (tree var, gimple stmt)
{
  return make_ssa_name_fn (cfun, var, stmt);
}

/* Return an SSA_NAME node using the template SSA name VAR, defined in
   statement STMT in function cfun.  */

static inline tree
copy_ssa_name (tree var, gimple stmt)
{
  return copy_ssa_name_fn (cfun, var, stmt);
}

/* Create a duplicate of SSA name VAR, to be defined by statement STMT
   in function cfun.  */

static inline tree
duplicate_ssa_name (tree var, gimple stmt)
{
  return duplicate_ssa_name_fn (cfun, var, stmt);
}

/* Return an anonymous SSA_NAME node for type TYPE defined in statement STMT
   in function cfun.  Arrange so that it uses NAME in dumps.  */

static inline tree
make_temp_ssa_name (tree type, gimple stmt, const char *name)
{
  tree ssa_name;
  gcc_checking_assert (TYPE_P (type));
  ssa_name = make_ssa_name_fn (cfun, type, stmt);
  SET_SSA_NAME_VAR_OR_IDENTIFIER (ssa_name, get_identifier (name));
  return ssa_name;
}
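
/* For example (a sketch): create an anonymous integer temporary defined
   by STMT that shows up in dumps with the base name "tmp":

     tree t = make_temp_ssa_name (integer_type_node, stmt, "tmp");
*/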

/* Returns the base object and a constant offset in *POFFSET, in units of
   BITS_PER_UNIT, that together denote the starting address of the memory
   access EXP.  Returns NULL_TREE if the offset is not constant or any
   component is not BITS_PER_UNIT-aligned.
   VALUEIZE, if non-NULL, is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */
/* ??? This is a static inline here to avoid the overhead of the indirect calls
   to VALUEIZE.  But is this overhead really that significant?  And should we
   perhaps just rely on WHOPR to specialize the function?  */

static inline tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
				 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  return NULL_TREE;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    HOST_WIDE_INT hthis_offset;

	    if (!this_offset
		|| TREE_CODE (this_offset) != INTEGER_CST
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset = TREE_INT_CST_LOW (this_offset);
	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);

	    /* If the resulting bit-offset is constant, track it.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& (low_bound = array_ref_low_bound (exp),
		    TREE_CODE (low_bound) == INTEGER_CST)
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

		hindex -= TREE_INT_CST_LOW (low_bound);
		hindex *= TREE_INT_CST_LOW (unit_size);
		byte_offset += hindex;
	      }
	    else
	      return NULL_TREE;
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    double_int off = mem_ref_offset (exp);
		    gcc_assert (off.high == -1 || off.high == 0);
		    byte_offset += off.to_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    double_int off = mem_ref_offset (exp);
		    gcc_assert (off.high == -1 || off.high == 0);
		    byte_offset += off.to_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
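
/* For example (a sketch): given 'struct S { int a, b; } s;', applying this
   routine to the COMPONENT_REF for 's.b' returns the decl 's' and sets
   *POFFSET to 4 on a target where 'int' is 4 bytes wide.  */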

#endif /* _TREE_FLOW_INLINE_H  */