//===--- Passes/CallGraph.cpp ---------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the CallGraph class used by BOLT passes.
//
//===----------------------------------------------------------------------===//

#include "bolt/Passes/CallGraph.h"

#define DEBUG_TYPE "callgraph"

#if defined(__x86_64__) && !defined(_MSC_VER)
#  if !defined(USE_SSECRC)
#    define USE_SSECRC
#  endif
#else
#  undef USE_SSECRC
#endif

namespace {

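// Portable 64-bit integer hash, used when the CRC32-based fast path is not
// available.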
inline size_t hash_int64_fallback(int64_t k) {
  uint64_t key = static_cast<uint64_t>(k);
  // "64 bit Mix Functions", from Thomas Wang's "Integer Hash Function."
  // http://www.concentric.net/~ttwang/tech/inthash.htm
  key = (~key) + (key << 21); // key = (key << 21) - key - 1;
  key = key ^ (key >> 24);
  key = (key + (key << 3)) + (key << 8); // key * 265
  key = key ^ (key >> 14);
  key = (key + (key << 2)) + (key << 4); // key * 21
  key = key ^ (key >> 28);
  return static_cast<size_t>(static_cast<uint32_t>(key));
}

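// Hash a 64-bit integer, using the SSE4.2 crc32 instruction when available
// and the portable mix function otherwise.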
inline size_t hash_int64(int64_t k) {
#if defined(USE_SSECRC) && defined(__SSE4_2__)
  size_t h = 0;
  __asm("crc32q %1, %0\n" : "+r"(h) : "rm"(k));
  return h;
#else
  return hash_int64_fallback(k);
#endif
}

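// Combine two 64-bit integers into a single hash value.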
inline size_t hash_int64_pair(int64_t k1, int64_t k2) {
#if defined(USE_SSECRC) && defined(__SSE4_2__)
  // crc32 is commutative, so we need to perturb k1 so that (k1, k2) hashes
  // differently from (k2, k1).
  k1 += k1;
  __asm("crc32q %1, %0\n" : "+r"(k1) : "rm"(k2));
  return k1;
#else
  return (hash_int64(k1) << 1) ^ hash_int64(k2);
#endif
}

} // end anonymous namespace

namespace llvm {
namespace bolt {

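// Hash an Arc by combining the hashes of its source and destination node ids.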
int64_t CallGraph::Arc::Hash::operator()(const Arc &Arc) const {
#ifdef USE_STD_HASH
  std::hash<int64_t> Hasher;
  return hashCombine(Hasher(Arc.src()), Arc.dst());
#else
  return hash_int64_pair(int64_t(Arc.src()), int64_t(Arc.dst()));
#endif
}

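// Add a new node with the given size and sample count and return its id.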
CallGraph::NodeId CallGraph::addNode(uint32_t Size, uint64_t Samples) {
  NodeId Id = Nodes.size();
  Nodes.emplace_back(Size, Samples);
  return Id;
}

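// Increase the weight of the arc from Src to Dst by W, creating the arc and
// the successor/predecessor links on first use. Call offsets are accumulated
// weighted by W; normalizeArcWeights() later turns the sum into an average.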
const CallGraph::Arc &CallGraph::incArcWeight(NodeId Src, NodeId Dst, double W,
                                              double Offset) {
  assert(Offset <= size(Src) && "Call offset exceeds function size");

  std::pair<ArcIterator, bool> Res = Arcs.emplace(Src, Dst, W);
  if (!Res.second) {
    Res.first->Weight += W;
    Res.first->AvgCallOffset += Offset * W;
    return *Res.first;
  }
  Res.first->AvgCallOffset = Offset * W;
  Nodes[Src].Succs.push_back(Dst);
  Nodes[Dst].Preds.push_back(Src);
  return *Res.first;
}

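// For every arc, compute a weight normalized by the callee's sample count and
// convert the accumulated weighted call offsets into average call offsets.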
void CallGraph::normalizeArcWeights() {
  for (NodeId FuncId = 0; FuncId < numNodes(); ++FuncId) {
    const Node &Func = getNode(FuncId);
    for (NodeId Caller : Func.predecessors()) {
      ArcIterator Arc = findArc(Caller, FuncId);
      Arc->NormalizedWeight = Arc->weight() / Func.samples();
      if (Arc->weight() > 0)
        Arc->AvgCallOffset /= Arc->weight();
      assert(Arc->AvgCallOffset <= size(Caller) &&
             "Avg call offset exceeds function size");
    }
  }
}

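// Raise each node's sample count so it is at least the total weight of its
// incoming arcs.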
void CallGraph::adjustArcWeights() {
  for (NodeId FuncId = 0; FuncId < numNodes(); ++FuncId) {
    const Node &Func = getNode(FuncId);
    uint64_t InWeight = 0;
    for (NodeId Caller : Func.predecessors()) {
      ArcIterator Arc = findArc(Caller, FuncId);
      InWeight += static_cast<uint64_t>(Arc->weight());
    }
    if (Func.samples() < InWeight)
      setSamples(FuncId, InWeight);
  }
}

} // namespace bolt
} // namespace llvm