Lines Matching refs: b

197 #define SET_INTERSECT(a, b, n)\ argument
199 register bpf_u_int32 *_x = a, *_y = b;\
207 #define SET_SUBTRACT(a, b, n)\ argument
209 register bpf_u_int32 *_x = a, *_y = b;\
217 #define SET_UNION(a, b, n)\ argument
219 register bpf_u_int32 *_x = a, *_y = b;\
229 #define MAX(a,b) ((a)>(b)?(a):(b)) argument
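The SET_INTERSECT/SET_SUBTRACT/SET_UNION macros above work word-at-a-time over bit sets stored as arrays of n 32-bit words. A minimal self-contained sketch of the same idea, written as functions rather than macros (the typedef and names are illustrative, not libpcap's):

    #include <stdio.h>

    typedef unsigned int bitword;              /* stand-in for bpf_u_int32 */

    /* a &= b, a &= ~b (set difference), a |= b, one word at a time */
    static void set_intersect(bitword *a, const bitword *b, int n)
    { while (--n >= 0) *a++ &= *b++; }

    static void set_subtract(bitword *a, const bitword *b, int n)
    { while (--n >= 0) *a++ &= ~*b++; }

    static void set_union(bitword *a, const bitword *b, int n)
    { while (--n >= 0) *a++ |= *b++; }

    int main(void)
    {
        bitword a[2] = { 0x0f0f0f0f, 0xffffffff };
        bitword b[2] = { 0x000000ff, 0x0000ffff };

        set_intersect(a, b, 2);                /* a becomes {0x0000000f, 0x0000ffff} */
        printf("%08x %08x\n", a[0], a[1]);
        return 0;
    }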
232 static void find_levels_r(b) in find_levels_r() argument
233 struct block *b; in find_levels_r()
237 if (isMarked(b))
240 Mark(b);
241 b->link = 0;
243 if (JT(b))
245 find_levels_r(JT(b));
246 find_levels_r(JF(b));
247 level = MAX(JT(b)->level, JF(b)->level) + 1;
251 b->level = level;
252 b->link = levels[level];
253 levels[level] = b;
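find_levels_r() walks the control-flow graph depth-first: a leaf gets level 0, an interior node gets one more than the higher of its two successors' levels, and blocks of equal level are threaded onto a per-level list through their link fields. A hedged sketch with a cut-down block type (the real struct block has many more fields):

    struct blk {
        int level;
        int marked;
        struct blk *jt, *jf;     /* true/false successors, i.e. JT(b)/JF(b) */
        struct blk *link;        /* next block at the same level */
    };

    #define MAXLEVEL 64          /* illustrative bound, not libpcap's */
    static struct blk *levels[MAXLEVEL];

    static void
    find_levels_r(struct blk *b)
    {
        int level;

        if (b->marked)
            return;
        b->marked = 1;
        b->link = 0;

        if (b->jt) {
            find_levels_r(b->jt);
            find_levels_r(b->jf);
            level = 1 + (b->jt->level > b->jf->level ?
                         b->jt->level : b->jf->level);
        } else
            level = 0;           /* return block: no successors */

        b->level = level;
        b->link = levels[level];
        levels[level] = b;
    }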
278 struct block *b; local
295 for (b = levels[i]; b; b = b->link)
297 SET_INSERT(b->dom, b->id);
298 if (JT(b) == 0)
300 SET_INTERSECT(JT(b)->dom, b->dom, nodewords);
301 SET_INTERSECT(JF(b)->dom, b->dom, nodewords);
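The dominator pass visits blocks from the root level downward, so every predecessor is handled before its successors. Each block inserts itself into its own dom set and intersects that set into both successors; because the sets start out full, a successor ends up keeping only the blocks that lie on every path from the root to it. A sketch of that loop, leaning on the SET_* macros from the listing and on a cut-down struct blk with assumed dom and id fields:

    /* Assumed extra fields on struct blk for this sketch:
     *   bpf_u_int32 *dom;   dominator bit set, pre-filled with all ones
     *   int id;             dense block number used as the bit index    */
    static void
    find_dominators(struct blk *root, struct blk **levels, int nodewords)
    {
        int i;
        struct blk *b;

        for (i = root->level; i >= 0; i--) {
            for (b = levels[i]; b; b = b->link) {
                SET_INSERT(b->dom, b->id);     /* every block dominates itself */
                if (b->jt == 0)
                    continue;                  /* return block: no successors */
                SET_INTERSECT(b->jt->dom, b->dom, nodewords);
                SET_INTERSECT(b->jf->dom, b->dom, nodewords);
            }
        }
    }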
326 struct block *b; local
337 for (b = levels[i]; b != 0; b = b->link)
339 propedom(&b->et);
340 propedom(&b->ef);
356 struct block *b; local
366 for (b = levels[i]; b; b = b->link)
368 SET_INSERT(b->closure, b->id);
369 if (JT(b) == 0)
371 SET_UNION(JT(b)->closure, b->closure, nodewords);
372 SET_UNION(JF(b)->closure, b->closure, nodewords);
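The closure pass is the same walk with union instead of intersection: a block's closure ends up holding itself plus every block that appears on at least one path from the root to it, where the dom set kept only the blocks on every such path. A sketch under the same assumptions as the dominator sketch, with an assumed closure bit set that starts out empty:

    static void
    find_closure(struct blk *root, struct blk **levels, int nodewords)
    {
        int i;
        struct blk *b;

        for (i = root->level; i >= 0; i--) {
            for (b = levels[i]; b; b = b->link) {
                SET_INSERT(b->closure, b->id);
                if (b->jt == 0)
                    continue;
                SET_UNION(b->jt->closure, b->closure, nodewords);
                SET_UNION(b->jf->closure, b->closure, nodewords);
            }
        }
    }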
464 static void compute_local_ud(b) in compute_local_ud() argument
465 struct block *b; in compute_local_ud()
471 for (s = b->stmts; s; s = s->next)
501 if (BPF_CLASS(b->s.code) == BPF_JMP)
506 atom = atomuse(&b->s);
526 b->def = def;
527 b->kill = kill;
528 b->in_use = use;
651 bpf_u_int32 a, b; local
654 b = vmap[v1].const_val;
659 a += b;
663 a -= b;
667 a *= b;
671 if (b == 0)
673 a /= b;
677 a &= b;
681 a |= b;
685 a <<= b;
689 a >>= b;
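The a/b arithmetic lines above belong to the constant folder: when both operands of an ALU instruction are known constants (looked up through vmap[]), the operation is evaluated at optimize time. A self-contained sketch of that switch; the real optimizer reports an error on a constant division by zero, while this sketch simply declines to fold:

    #include <pcap/bpf.h>     /* BPF_ADD, BPF_SUB, ... and bpf_u_int32 */

    /* Returns 0 and writes the folded value through "a" on success,
     * -1 if the operation cannot be folded.  "op" is BPF_OP(s->code). */
    static int
    fold_constants(bpf_u_int32 *a, bpf_u_int32 b, int op)
    {
        switch (op) {
        case BPF_ADD: *a += b;  break;
        case BPF_SUB: *a -= b;  break;
        case BPF_MUL: *a *= b;  break;
        case BPF_DIV:
            if (b == 0)
                return -1;      /* division by zero: leave it to run time */
            *a /= b;
            break;
        case BPF_AND: *a &= b;  break;
        case BPF_OR:  *a |= b;  break;
        case BPF_LSH: *a <<= b; break;
        case BPF_RSH: *a >>= b; break;
        default:
            return -1;
        }
        return 0;
    }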
712 static void opt_not(b) in opt_not() argument
713 struct block *b; in opt_not()
715 struct block *tmp = JT(b);
717 JT(b) = JF(b);
718 JF(b) = tmp;
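opt_not() is the cheapest transformation in the file: a conditional block is logically negated just by swapping its true and false successors. Sketch, reusing the cut-down struct blk from the earlier sketch:

    static void
    opt_not_sketch(struct blk *b)
    {
        struct blk *tmp = b->jt;

        b->jt = b->jf;           /* the old false edge becomes the true edge */
        b->jf = tmp;             /* and vice versa */
    }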
721 static void opt_peep(b) in opt_peep() argument
722 struct block *b; in opt_peep()
728 s = b->stmts;
784 if (ATOMELEM(b->out_use, X_ATOM))
857 if (b->s.code == (BPF_JMP | BPF_JEQ | BPF_K) && !ATOMELEM(b->out_use, A_ATOM))
865 val = b->val[X_ATOM];
877 b->s.k += vmap[val].const_val;
881 else if (b->s.k == 0)
893 b->s.code = BPF_JMP | BPF_JEQ | BPF_X;
906 b->s.k += last->s.k;
916 else if (last->s.code == (BPF_ALU | BPF_AND | BPF_K) && b->s.k == 0)
918 b->s.k = last->s.k;
919 b->s.code = BPF_JMP | BPF_K | BPF_JSET;
922 opt_not(b);
929 if (b->s.code == (BPF_JMP | BPF_K | BPF_JSET))
931 if (b->s.k == 0)
932 JT(b) = JF(b);
933 if (b->s.k == 0xffffffff)
934 JF(b) = JT(b);
941 val = b->val[X_ATOM];
942 if (vmap[val].is_const && BPF_SRC(b->s.code) == BPF_X)
945 b->s.code &= ~BPF_X;
946 b->s.k = v;
952 val = b->val[A_ATOM];
953 if (vmap[val].is_const && BPF_SRC(b->s.code) == BPF_K)
956 switch (BPF_OP(b->s.code))
960 v = v == b->s.k;
964 v = (unsigned) v > b->s.k;
968 v = (unsigned) v >= b->s.k;
972 v &= b->s.k;
978 if (JF(b) != JT(b))
981 JF(b) = JT(b);
983 JT(b) = JF(b);
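The tail of opt_peep() above evaluates a conditional whose accumulator value is already a known constant: the comparison against the immediate k is computed during optimization, and the branch that can never be taken is bypassed by pointing it at the surviving successor. A hedged sketch with the known accumulator value passed in, using the earlier cut-down struct blk plus assumed code and k members:

    #include <pcap/bpf.h>

    /* Assumed extra fields on struct blk for this sketch:
     *   int code;         the jump's opcode, e.g. BPF_JMP|BPF_JEQ|BPF_K
     *   bpf_u_int32 k;    the immediate operand                         */
    static void
    fold_known_jump(struct blk *b, bpf_u_int32 acc)
    {
        int taken;

        switch (BPF_OP(b->code)) {
        case BPF_JEQ:  taken = (acc == b->k);        break;
        case BPF_JGT:  taken = (acc >  b->k);        break;
        case BPF_JGE:  taken = (acc >= b->k);        break;
        case BPF_JSET: taken = ((acc & b->k) != 0);  break;
        default:       return;                  /* not a foldable test */
        }
        if (taken)
            b->jf = b->jt;       /* test is always true: false edge is dead */
        else
            b->jt = b->jf;       /* test is always false: true edge is dead */
    }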
1221 static void opt_deadstores(b) in opt_deadstores() argument
1222 register struct block *b; in opt_deadstores()
1230 for (s = b->stmts; s != 0; s = s->next)
1232 deadstmt(&b->s, last);
1235 if (last[atom] && !ATOMELEM(b->out_use, atom))
1242 static void opt_blk(b, do_stmts) in opt_blk() argument
1243 struct block *b; in opt_blk()
1252 for (s = b->stmts; s && s->next; s = s->next)
1263 p = b->in_edges;
1270 memset((char *) b->val, 0, sizeof(b->val));
1280 memcpy((char *) b->val, (char *) p->pred->val, sizeof(b->val));
1292 if (b->val[i] != p->pred->val[i])
1293 b->val[i] = 0;
1296 aval = b->val[A_ATOM];
1297 xval = b->val[X_ATOM];
1298 for (s = b->stmts; s; s = s->next)
1299 opt_stmt(&s->s, b->val, do_stmts);
1325 ((b->out_use == 0 && aval != 0 && b->val[A_ATOM] == aval &&
1326 xval != 0 && b->val[X_ATOM] == xval) || BPF_CLASS(b->s.code) == BPF_RET))
1328 if (b->stmts != 0)
1330 b->stmts = 0;
1336 opt_peep(b);
1337 opt_deadstores(b);
1342 if (BPF_SRC(b->s.code) == BPF_K)
1343 b->oval = K(b->s.k);
1345 b->oval = b->val[X_ATOM];
1346 b->et.code = b->s.code;
1347 b->ef.code = -b->s.code;
1355 static int use_conflict(b, succ) in use_conflict() argument
1356 struct block *b, *succ; in use_conflict()
1366 if (b->val[atom] != succ->val[atom])
1486 static void or_pullup(b) in or_pullup() argument
1487 struct block *b; in or_pullup()
1494 ep = b->in_edges;
1507 if (JT(b->in_edges->pred) == b)
1508 diffp = &JT(b->in_edges->pred);
1510 diffp = &JF(b->in_edges->pred);
1518 if (JT(*diffp) != JT(b))
1521 if (!SET_MEMBER((*diffp)->dom, b->id))
1536 if (JT(*samep) != JT(b))
1539 if (!SET_MEMBER((*samep)->dom, b->id))
1568 for (ep = b->in_edges; ep != 0; ep = ep->next)
1570 if (JT(ep->pred) == b)
1582 static void and_pullup(b) in and_pullup() argument
1583 struct block *b; in and_pullup()
1590 ep = b->in_edges;
1602 if (JT(b->in_edges->pred) == b)
1603 diffp = &JT(b->in_edges->pred);
1605 diffp = &JF(b->in_edges->pred);
1613 if (JF(*diffp) != JF(b))
1616 if (!SET_MEMBER((*diffp)->dom, b->id))
1631 if (JF(*samep) != JF(b))
1634 if (!SET_MEMBER((*samep)->dom, b->id))
1663 for (ep = b->in_edges; ep != 0; ep = ep->next)
1665 if (JT(ep->pred) == b)
1731 struct block *b; local
1742 for (b = levels[i]; b != 0; b = b->link)
1744 link_inedge(&b->et, JT(b));
1745 link_inedge(&b->ef, JF(b));
1750 static void opt_root(b) in opt_root() argument
1751 struct block **b; in opt_root()
1755 s = (*b)->stmts;
1756 (*b)->stmts = 0;
1757 while (BPF_CLASS((*b)->s.code) == BPF_JMP && JT(*b) == JF(*b))
1758 *b = JT(*b);
1760 tmp = (*b)->stmts;
1763 (*b)->stmts = s;
1770 if (BPF_CLASS((*b)->s.code) == BPF_RET)
1771 (*b)->stmts = 0;
2091 register struct block *b = blocks[i]; local
2093 b->et.edom = p;
2095 b->ef.edom = p;
2097 b->et.id = i;
2098 edges[i] = &b->et;
2099 b->ef.id = n_blocks + i;
2100 edges[n_blocks + i] = &b->ef;
2101 b->et.pred = b;
2102 b->ef.pred = b;
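The final fragment numbers the edges: each block carries its own true edge et and false edge ef, the true edge takes the block's index, the false edge takes that index offset by n_blocks, and both record the block as their predecessor, so a single edges[] array of length 2*n_blocks addresses every edge in the graph. A sketch with stand-in types (the edom initialization from the listing is omitted):

    struct block_sk;
    struct edge_sk  { int id; struct block_sk *pred; };
    struct block_sk { struct edge_sk et, ef; };

    static void
    number_edges(struct block_sk **blocks, struct edge_sk **edges, int n_blocks)
    {
        int i;

        for (i = 0; i < n_blocks; i++) {
            struct block_sk *b = blocks[i];

            b->et.id = i;                    /* true edge gets the block index */
            b->et.pred = b;
            edges[i] = &b->et;

            b->ef.id = n_blocks + i;         /* false edge is offset by n_blocks */
            b->ef.pred = b;
            edges[n_blocks + i] = &b->ef;
        }
    }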