//===- TFUtils.cpp - tensorflow evaluation utilities ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements utilities for interfacing with the TensorFlow C API.
//
//===----------------------------------------------------------------------===//
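//
// Example usage (a minimal sketch: the templated initInput<T> / getInput<T> /
// getTensorValue<T> convenience wrappers are assumed to be the ones declared
// in TFUtils.h, and the SavedModel path, node names, and "serve" tag below are
// purely illustrative):
//
//   std::vector<std::string> InputNames{"input"};
//   std::vector<std::string> OutputNames{"output"};
//   TFModelEvaluator Evaluator("path/to/saved_model", InputNames, OutputNames,
//                              "serve");
//   if (!Evaluator.isValid())
//     return;
//   Evaluator.initInput<int64_t>(0, {1, 4}); // shape {1, 4}, zero-initialized
//   int64_t *Features = Evaluator.getInput<int64_t>(0);
//   Features[0] = 42; // ...fill in the remaining features...
//   if (auto Result = Evaluator.evaluate())
//     float Score = *Result->getTensorValue<float>(0);
//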
#include "llvm/Config/config.h"
#if defined(LLVM_HAVE_TF_API)

#include "llvm/Analysis/Utils/TFUtils.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ManagedStatic.h"
#include "llvm/Support/raw_ostream.h"

#include "tensorflow/c/c_api.h"
#include "tensorflow/c/c_api_experimental.h"

#include <cassert>
#include <cstring>

using namespace llvm;

namespace {

using TFGraphPtr = std::unique_ptr<TF_Graph, decltype(&TF_DeleteGraph)>;
using TFSessionOptionsPtr =
    std::unique_ptr<TF_SessionOptions, decltype(&TF_DeleteSessionOptions)>;
using TFStatusPtr = std::unique_ptr<TF_Status, decltype(&TF_DeleteStatus)>;

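/// Process-wide, one-time initialization of the TensorFlow library, triggered
/// lazily through the ManagedStatic below on first use.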
struct TFInitializer {
  TFInitializer() {
    assert(!IsInitialized && "TFInitializer should only be constructed once");
    int Argc = 1;
    const char *Name = "";
    const char **NamePtr = &Name;
    TF_InitMain(Name, &Argc, const_cast<char ***>(&NamePtr));
    IsInitialized = true;
  }
  bool IsInitialized = false;
};

llvm::ManagedStatic<TFInitializer> TFLibInitializer;

bool ensureInitTF() { return TFLibInitializer->IsInitialized; }

TFGraphPtr createTFGraph() {
  return TFGraphPtr(TF_NewGraph(), &TF_DeleteGraph);
}

TFStatusPtr createTFStatus() {
  return TFStatusPtr(TF_NewStatus(), &TF_DeleteStatus);
}

TFSessionOptionsPtr createTFSessionOptions() {
  return TFSessionOptionsPtr(TF_NewSessionOptions(), &TF_DeleteSessionOptions);
}
} // namespace

namespace llvm {
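/// Owns the raw TF_Tensor outputs of one evaluation and releases them on
/// destruction; the public TFModelEvaluator::EvaluationResult wraps this.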
class EvaluationResultImpl {
public:
  EvaluationResultImpl(size_t OutputSize)
      : OutputSize(OutputSize), Output(OutputSize) {}

  ~EvaluationResultImpl() {
    for (auto *P : Output)
      if (P)
        TF_DeleteTensor(P);
  }

  EvaluationResultImpl(const EvaluationResultImpl &) = delete;
  EvaluationResultImpl(EvaluationResultImpl &&Other) = delete;
  std::vector<TF_Tensor *> &getOutput() { return Output; }

private:
  const size_t OutputSize;
  std::vector<TF_Tensor *> Output;
};

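/// Pimpl for TFModelEvaluator: loads the SavedModel once and keeps the
/// TF_Session, the graph, and the input/output bindings alive so they can be
/// reused across evaluations.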
class TFModelEvaluatorImpl {
public:
  TFModelEvaluatorImpl(StringRef SavedModelPath,
                       const std::vector<std::string> &InputNames,
                       const std::vector<std::string> &OutputNames,
                       const char *Tags);

  bool isValid() const { return IsValid; }
  size_t OutputSize() const { return OutputFeed.size(); }

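  /// Run the model: feed the pre-allocated Input tensors into the InputFeed
  /// nodes and fetch the OutputFeed nodes into Output. Ownership of the
  /// fetched tensors passes to the caller (EvaluationResultImpl deletes them).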
  void evaluate(TF_Tensor **Output, TF_Status *Status) {
    TF_SessionRun(Session, nullptr, InputFeed.data(), Input.data(),
                  Input.size(), OutputFeed.data(), Output, OutputFeed.size(),
                  nullptr, 0, nullptr, Status);
  }

  void initInput(size_t Index, TF_DataType Type,
                 const std::vector<int64_t> &Dimensions);
  const std::vector<TF_Tensor *> &getInput() const { return Input; }

  ~TFModelEvaluatorImpl();

private:
  /// The objects necessary for carrying out an evaluation of the SavedModel.
  /// They are expensive to set up, and we maintain them across all the
  /// evaluations of the model.
  TF_Session *Session = nullptr;
  TFGraphPtr Graph;
  TFSessionOptionsPtr Options;

  /// The specification of the input nodes.
  std::vector<TF_Output> InputFeed;

  /// The input tensors. They must match, by index, the corresponding InputFeed
  /// value. We set up the tensors once and just mutate their scalars before
  /// each evaluation. The input tensors keep their values after an evaluation.
  std::vector<TF_Tensor *> Input;

  /// The specification of the output nodes. When evaluating, the tensors in
  /// the output tensor vector must match, by index, the corresponding element
  /// in OutputFeed.
  std::vector<TF_Output> OutputFeed;

  void invalidate() { IsValid = false; }

  bool IsValid = true;

  /// Reusable utility for ensuring we can bind the requested Name to a node in
  /// the SavedModel Graph.
  bool checkReportAndInvalidate(const TF_Output &Output, StringRef Name);
};
} // namespace llvm

TFModelEvaluatorImpl::TFModelEvaluatorImpl(
    StringRef SavedModelPath, const std::vector<std::string> &InputNames,
    const std::vector<std::string> &OutputNames, const char *Tags)
    : Graph(createTFGraph()), Options(createTFSessionOptions()),
      InputFeed(InputNames.size()), Input(InputNames.size()),
      OutputFeed(OutputNames.size()) {
  if (!ensureInitTF()) {
    errs() << "TensorFlow should have been initialized\n";
    return;
  }
  auto Status = createTFStatus();

  Session = TF_LoadSessionFromSavedModel(Options.get(), nullptr,
                                         SavedModelPath.str().c_str(), &Tags, 1,
                                         Graph.get(), nullptr, Status.get());
  if (TF_GetCode(Status.get()) != TF_Code::TF_OK) {
    errs() << TF_Message(Status.get());
    invalidate();
    return;
  }
  for (size_t I = 0; I < InputNames.size(); ++I) {
    InputFeed[I] = {
        TF_GraphOperationByName(Graph.get(), (InputNames[I]).c_str()), 0};
    if (!checkReportAndInvalidate(InputFeed[I], InputNames[I]))
      return;
  }
  for (size_t I = 0; I < OutputNames.size(); ++I) {
    OutputFeed[I] = {
        TF_GraphOperationByName(Graph.get(), (OutputNames[I]).c_str()), 0};
    if (!checkReportAndInvalidate(OutputFeed[I], OutputNames[I]))
      return;
  }
}

TFModelEvaluator::TFModelEvaluator(StringRef SavedModelPath,
                                   const std::vector<std::string> &InputNames,
                                   const std::vector<std::string> &OutputNames,
                                   const char *Tags)
    : Impl(new TFModelEvaluatorImpl(SavedModelPath, InputNames, OutputNames,
                                    Tags)) {
  if (!Impl->isValid())
    Impl.reset();
}

TFModelEvaluatorImpl::~TFModelEvaluatorImpl() {
  for (auto *T : Input)
    TF_DeleteTensor(T);
  if (Session == nullptr)
    return;
  auto Status = createTFStatus();
  TF_DeleteSession(Session, Status.get());
  Session = nullptr;
  if (TF_GetCode(Status.get()) != TF_Code::TF_OK)
    errs() << "Could not delete TF session\n";
}

bool TFModelEvaluatorImpl::checkReportAndInvalidate(const TF_Output &Output,
                                                    StringRef Name) {
  if (Output.oper)
    return true;
  errs() << "Could not find TF_Output named: " + Name + "\n";
  IsValid = false;
  return IsValid;
}

Optional<TFModelEvaluator::EvaluationResult> TFModelEvaluator::evaluate() {
  if (!isValid())
    return None;
  std::unique_ptr<EvaluationResultImpl> Ret =
      std::make_unique<EvaluationResultImpl>(Impl->OutputSize());
  auto Status = createTFStatus();
  Impl->evaluate(Ret->getOutput().data(), Status.get());
  if (TF_GetCode(Status.get()) != TF_Code::TF_OK) {
    errs() << TF_Message(Status.get());
    Impl.reset();
    return None;
  }
  return EvaluationResult(std::move(Ret));
}

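// Allocate and zero-initialize the input tensor at Index. The byte size is the
// element size times all dimensions; for example, Type == TF_FLOAT and
// Dimensions == {1, 8} yield TF_DataTypeSize(TF_FLOAT) * 1 * 8 == 32 bytes.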
void TFModelEvaluatorImpl::initInput(size_t Index, TF_DataType Type,
                                     const std::vector<int64_t> &Dimensions) {
  int64_t TotalSize = TF_DataTypeSize(Type);
  for (auto &D : Dimensions)
    TotalSize *= D;

  Input[Index] =
      TF_AllocateTensor(Type, Dimensions.data(), Dimensions.size(), TotalSize);
  std::memset(TF_TensorData(Input[Index]), 0, TotalSize);
}

void *TFModelEvaluator::getUntypedInput(size_t Index) {
  return TF_TensorData(Impl->getInput()[Index]);
}

TFModelEvaluator::EvaluationResult::EvaluationResult(
    std::unique_ptr<EvaluationResultImpl> Impl)
    : Impl(std::move(Impl)) {}

TFModelEvaluator::EvaluationResult::EvaluationResult(EvaluationResult &&Other)
    : Impl(std::move(Other.Impl)) {}

void *TFModelEvaluator::EvaluationResult::getUntypedTensorValue(size_t Index) {
  return TF_TensorData(Impl->getOutput()[Index]);
}

void TFModelEvaluator::initInput(size_t Index, int TypeIndex,
                                 const std::vector<int64_t> &Dimensions) {
  Impl->initInput(Index, static_cast<TF_DataType>(TypeIndex), Dimensions);
}

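// Explicit specializations mapping C++ scalar types to TensorFlow's
// TF_DataType enumerators, so header clients can name element types without
// pulling in the TensorFlow C API headers.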
template <> int TFModelEvaluator::getModelTypeIndex<float>() {
  return TF_FLOAT;
}

template <> int TFModelEvaluator::getModelTypeIndex<double>() {
  return TF_DOUBLE;
}

template <> int TFModelEvaluator::getModelTypeIndex<int8_t>() {
  return TF_INT8;
}

template <> int TFModelEvaluator::getModelTypeIndex<uint8_t>() {
  return TF_UINT8;
}

template <> int TFModelEvaluator::getModelTypeIndex<int16_t>() {
  return TF_INT16;
}

template <> int TFModelEvaluator::getModelTypeIndex<uint16_t>() {
  return TF_UINT16;
}

template <> int TFModelEvaluator::getModelTypeIndex<int32_t>() {
  return TF_INT32;
}

template <> int TFModelEvaluator::getModelTypeIndex<uint32_t>() {
  return TF_UINT32;
}

template <> int TFModelEvaluator::getModelTypeIndex<int64_t>() {
  return TF_INT64;
}

template <> int TFModelEvaluator::getModelTypeIndex<uint64_t>() {
  return TF_UINT64;
}

TFModelEvaluator::EvaluationResult::~EvaluationResult() {}
TFModelEvaluator::~TFModelEvaluator() {}
#endif // defined(LLVM_HAVE_TF_API)