//===- ExecutionEngine.cpp - MLIR Execution engine and utils --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the execution engine for MLIR modules based on the
// LLVM ORC JIT engine.
//
//===----------------------------------------------------------------------===//

#include "mlir/ExecutionEngine/ExecutionEngine.h"
#include "mlir/Dialect/LLVMIR/LLVMDialect.h"
#include "mlir/IR/Function.h"
#include "mlir/IR/Module.h"
#include "mlir/Support/FileUtilities.h"
#include "mlir/Target/LLVMIR.h"

#include "llvm/ExecutionEngine/JITEventListener.h"
#include "llvm/ExecutionEngine/ObjectCache.h"
#include "llvm/ExecutionEngine/Orc/CompileUtils.h"
#include "llvm/ExecutionEngine/Orc/ExecutionUtils.h"
#include "llvm/ExecutionEngine/Orc/IRCompileLayer.h"
#include "llvm/ExecutionEngine/Orc/IRTransformLayer.h"
#include "llvm/ExecutionEngine/Orc/JITTargetMachineBuilder.h"
#include "llvm/ExecutionEngine/Orc/RTDyldObjectLinkingLayer.h"
#include "llvm/ExecutionEngine/SectionMemoryManager.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/Host.h"
#include "llvm/Support/TargetRegistry.h"
#include "llvm/Support/ToolOutputFile.h"

#define DEBUG_TYPE "execution-engine"

using namespace mlir;
using llvm::dbgs;
using llvm::Error;
using llvm::errs;
using llvm::Expected;
using llvm::LLVMContext;
using llvm::MemoryBuffer;
using llvm::MemoryBufferRef;
using llvm::Module;
using llvm::SectionMemoryManager;
using llvm::StringError;
using llvm::Triple;
using llvm::orc::DynamicLibrarySearchGenerator;
using llvm::orc::ExecutionSession;
using llvm::orc::IRCompileLayer;
using llvm::orc::JITTargetMachineBuilder;
using llvm::orc::RTDyldObjectLinkingLayer;
using llvm::orc::ThreadSafeModule;
using llvm::orc::TMOwningSimpleCompiler;

/// Wrap a string into an llvm::StringError.
static Error make_string_error(const Twine &message) {
  return llvm::make_error<StringError>(message.str(),
                                       llvm::inconvertibleErrorCode());
}

void SimpleObjectCache::notifyObjectCompiled(const Module *M,
                                             MemoryBufferRef ObjBuffer) {
  cachedObjects[M->getModuleIdentifier()] = MemoryBuffer::getMemBufferCopy(
      ObjBuffer.getBuffer(), ObjBuffer.getBufferIdentifier());
}

std::unique_ptr<MemoryBuffer> SimpleObjectCache::getObject(const Module *M) {
  auto I = cachedObjects.find(M->getModuleIdentifier());
  if (I == cachedObjects.end()) {
    LLVM_DEBUG(dbgs() << "No object for " << M->getModuleIdentifier()
                      << " in cache. Compiling.\n");
    return nullptr;
  }
  LLVM_DEBUG(dbgs() << "Object for " << M->getModuleIdentifier()
                    << " loaded from cache.\n");
  return MemoryBuffer::getMemBuffer(I->second->getMemBufferRef());
}

void SimpleObjectCache::dumpToObjectFile(StringRef outputFilename) {
  // Set up the output file.
  std::string errorMessage;
  auto file = openOutputFile(outputFilename, &errorMessage);
  if (!file) {
    llvm::errs() << errorMessage << "\n";
    return;
  }

  // Dump the object generated for a single module to the output file.
  assert(cachedObjects.size() == 1 && "Expected only one object entry.");
  auto &cachedObject = cachedObjects.begin()->second;
  file->os() << cachedObject->getBuffer();
  file->keep();
}

void ExecutionEngine::dumpToObjectFile(StringRef filename) {
  cache->dumpToObjectFile(filename);
}

// Set up the LLVM target triple from the current machine.
bool ExecutionEngine::setupTargetTriple(Module *llvmModule) {
  // Set up the machine properties from the current architecture.
  auto targetTriple = llvm::sys::getDefaultTargetTriple();
  std::string errorMessage;
  auto target = llvm::TargetRegistry::lookupTarget(targetTriple, errorMessage);
  if (!target) {
    errs() << "No target: " << errorMessage << "\n";
    return true;
  }
  std::unique_ptr<llvm::TargetMachine> machine(
      target->createTargetMachine(targetTriple, "generic", "", {}, {}));
  llvmModule->setDataLayout(machine->createDataLayout());
  llvmModule->setTargetTriple(targetTriple);
  return false;
}

static std::string makePackedFunctionName(StringRef name) {
  return "_mlir_" + name.str();
}

// For each function in the LLVM module, define an interface function that wraps
// all the arguments of the original function and all its results into an i8**
// pointer to provide a unified invocation interface.
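//
// For example, given `float foo(i32, float)`, the generated wrapper
// `_mlir_foo(i8**)` is conceptually equivalent to the following sketch, where
// each entry of the packed list points to an argument value and the entry
// after the last argument points to storage for the result:
//
//   void _mlir_foo(i8 **args) {
//     i32 arg0 = *(i32 *)args[0];
//     float arg1 = *(float *)args[1];
//     float result = foo(arg0, arg1);
//     *(float *)args[2] = result;
//   }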
static void packFunctionArguments(Module *module) {
  auto &ctx = module->getContext();
  llvm::IRBuilder<> builder(ctx);
  DenseSet<llvm::Function *> interfaceFunctions;
  for (auto &func : module->getFunctionList()) {
    if (func.isDeclaration()) {
      continue;
    }
    if (interfaceFunctions.count(&func)) {
      continue;
    }
    // Given a function `foo(<...>)`, define the interface function
    // `_mlir_foo(i8**)`.
    auto newType = llvm::FunctionType::get(
        builder.getVoidTy(), builder.getInt8PtrTy()->getPointerTo(),
        /*isVarArg=*/false);
    auto newName = makePackedFunctionName(func.getName());
    auto funcCst = module->getOrInsertFunction(newName, newType);
    llvm::Function *interfaceFunc = cast<llvm::Function>(funcCst.getCallee());
    interfaceFunctions.insert(interfaceFunc);

    // Extract the arguments from the type-erased argument list and cast them to
    // the proper types.
    auto bb = llvm::BasicBlock::Create(ctx);
    bb->insertInto(interfaceFunc);
    builder.SetInsertPoint(bb);
    llvm::Value *argList = interfaceFunc->arg_begin();
    SmallVector<llvm::Value *, 8> args;
    args.reserve(llvm::size(func.args()));
    for (auto &indexedArg : llvm::enumerate(func.args())) {
      llvm::Value *argIndex = llvm::Constant::getIntegerValue(
          builder.getInt64Ty(), APInt(64, indexedArg.index()));
      llvm::Value *argPtrPtr = builder.CreateGEP(argList, argIndex);
      llvm::Value *argPtr = builder.CreateLoad(argPtrPtr);
      argPtr = builder.CreateBitCast(
          argPtr, indexedArg.value().getType()->getPointerTo());
      llvm::Value *arg = builder.CreateLoad(argPtr);
      args.push_back(arg);
    }

    // Call the implementation function with the extracted arguments.
    llvm::Value *result = builder.CreateCall(&func, args);
    // The function is assumed to return at most one result; store it into the
    // slot following the arguments unless it is of type `void`.
    if (!result->getType()->isVoidTy()) {
      llvm::Value *retIndex = llvm::Constant::getIntegerValue(
          builder.getInt64Ty(), APInt(64, llvm::size(func.args())));
      llvm::Value *retPtrPtr = builder.CreateGEP(argList, retIndex);
      llvm::Value *retPtr = builder.CreateLoad(retPtrPtr);
      retPtr = builder.CreateBitCast(retPtr, result->getType()->getPointerTo());
      builder.CreateStore(result, retPtr);
    }

    // The interface function returns void.
    builder.CreateRetVoid();
  }
}

ExecutionEngine::ExecutionEngine(bool enableObjectCache,
                                 bool enableGDBNotificationListener)
    : cache(enableObjectCache ? new SimpleObjectCache() : nullptr),
      gdbListener(enableGDBNotificationListener
                      ? llvm::JITEventListener::createGDBRegistrationListener()
                      : nullptr) {}

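// A minimal usage sketch for constructing an engine (the variable names and
// option values below are illustrative): the input ModuleOp is expected to
// have been lowered to the LLVM dialect beforehand.
//
//   auto maybeEngine = mlir::ExecutionEngine::create(
//       module, /*transformer=*/{}, /*jitCodeGenOptLevel=*/llvm::None,
//       /*sharedLibPaths=*/{}, /*enableObjectCache=*/true,
//       /*enableGDBNotificationListener=*/true);
//   if (!maybeEngine)
//     /* ...report maybeEngine.takeError()... */;
//   std::unique_ptr<mlir::ExecutionEngine> engine = std::move(*maybeEngine);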
Expected<std::unique_ptr<ExecutionEngine>> ExecutionEngine::create(
    ModuleOp m, std::function<Error(llvm::Module *)> transformer,
    Optional<llvm::CodeGenOpt::Level> jitCodeGenOptLevel,
    ArrayRef<StringRef> sharedLibPaths, bool enableObjectCache,
    bool enableGDBNotificationListener) {
  auto engine = std::make_unique<ExecutionEngine>(
      enableObjectCache, enableGDBNotificationListener);

  std::unique_ptr<llvm::LLVMContext> ctx(new llvm::LLVMContext);
  auto llvmModule = translateModuleToLLVMIR(m);
  if (!llvmModule)
    return make_string_error("could not convert to LLVM IR");
  // FIXME: the triple should be passed to the translation or dialect conversion
  // instead of this.  Currently, the LLVM module created above has no triple
  // associated with it.
  setupTargetTriple(llvmModule.get());
  packFunctionArguments(llvmModule.get());

  // Clone the module into a new LLVMContext since translateModuleToLLVMIR
  // buries ownership too deeply.
  // TODO(zinenko): Reevaluate model of ownership of LLVMContext in LLVMDialect.
  std::unique_ptr<Module> deserModule =
      LLVM::cloneModuleIntoNewContext(ctx.get(), llvmModule.get());
  auto dataLayout = deserModule->getDataLayout();

  // Callback to create the object layer with symbol resolution to the current
  // process and dynamically linked libraries.
  auto objectLinkingLayerCreator = [&](ExecutionSession &session,
                                       const Triple &TT) {
    auto objectLayer = std::make_unique<RTDyldObjectLinkingLayer>(
        session, []() { return std::make_unique<SectionMemoryManager>(); });
    objectLayer->setNotifyLoaded(
        [engine = engine.get()](
            llvm::orc::VModuleKey, const llvm::object::ObjectFile &object,
            const llvm::RuntimeDyld::LoadedObjectInfo &objectInfo) {
          if (engine->gdbListener) {
            uint64_t key = static_cast<uint64_t>(
                reinterpret_cast<uintptr_t>(object.getData().data()));
            engine->gdbListener->notifyObjectLoaded(key, object, objectInfo);
          }
        });

    // Resolve symbols from shared libraries.
    for (auto libPath : sharedLibPaths) {
      auto mb = llvm::MemoryBuffer::getFile(libPath);
      if (!mb) {
        errs() << "Failed to create a MemoryBuffer for: " << libPath << "\n";
        continue;
      }
      auto &JD = session.createBareJITDylib(std::string(libPath));
      auto loaded = DynamicLibrarySearchGenerator::Load(
          libPath.data(), dataLayout.getGlobalPrefix());
      if (!loaded) {
        errs() << "Could not load " << libPath << ":\n  "
               << llvm::toString(loaded.takeError()) << "\n";
        continue;
      }
      JD.addGenerator(std::move(*loaded));
      cantFail(objectLayer->add(JD, std::move(mb.get())));
    }

    return objectLayer;
  };

  // Callback to inspect the cache and recompile on demand. This follows Lang's
  // LLJITWithObjectCache example.
  auto compileFunctionCreator = [&](JITTargetMachineBuilder JTMB)
      -> Expected<std::unique_ptr<IRCompileLayer::IRCompiler>> {
    if (jitCodeGenOptLevel)
      JTMB.setCodeGenOptLevel(jitCodeGenOptLevel.getValue());
    auto TM = JTMB.createTargetMachine();
    if (!TM)
      return TM.takeError();
    return std::make_unique<TMOwningSimpleCompiler>(std::move(*TM),
                                                    engine->cache.get());
  };

  // Create the LLJIT by calling the LLJITBuilder with the two callbacks above.
  auto jit =
      cantFail(llvm::orc::LLJITBuilder()
                   .setCompileFunctionCreator(compileFunctionCreator)
                   .setObjectLinkingLayerCreator(objectLinkingLayerCreator)
                   .create());

  // Add a ThreadSafeModule to the engine and return.
  ThreadSafeModule tsm(std::move(deserModule), std::move(ctx));
  if (transformer)
    cantFail(tsm.withModuleDo(
        [&](llvm::Module &module) { return transformer(&module); }));
  cantFail(jit->addIRModule(std::move(tsm)));
  engine->jit = std::move(jit);

  // Resolve symbols that are statically linked in the current process.
  llvm::orc::JITDylib &mainJD = engine->jit->getMainJITDylib();
  mainJD.addGenerator(
      cantFail(DynamicLibrarySearchGenerator::GetForCurrentProcess(
          dataLayout.getGlobalPrefix())));

  return std::move(engine);
}

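// Look up the packed-argument wrapper emitted by packFunctionArguments for
// `name` and return it as a type-erased function pointer that takes the
// packed argument list.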
Expected<void (*)(void **)> ExecutionEngine::lookup(StringRef name) const {
  auto expectedSymbol = jit->lookup(makePackedFunctionName(name));

  // JIT lookup may return an Error referring to strings stored internally by
  // the JIT. If the Error outlives the ExecutionEngine, it would end up with a
  // dangling reference, which is currently caught by an assertion inside JIT
  // thanks to hand-rolled reference counting. Rewrap the error message into a
  // string before returning. Alternatively, ORC JIT should consider copying
  // the string into the error message.
  if (!expectedSymbol) {
    std::string errorMessage;
    llvm::raw_string_ostream os(errorMessage);
    llvm::handleAllErrors(expectedSymbol.takeError(),
                          [&os](llvm::ErrorInfoBase &ei) { ei.log(os); });
    return make_string_error(os.str());
  }

  auto rawFPtr = expectedSymbol->getAddress();
  auto fptr = reinterpret_cast<void (*)(void **)>(rawFPtr);
  if (!fptr)
    return make_string_error("looked up function is null");
  return fptr;
}

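// A minimal invocation sketch (the function name and types are illustrative):
// for a JIT-compiled `foo` taking one i32 and returning one i32, each entry
// of `args` points to an argument value and the final entry points to storage
// for the result, matching the packed interface built by
// packFunctionArguments.
//
//   int32_t input = 42;
//   int32_t result = 0;
//   llvm::SmallVector<void *, 2> args = {&input, &result};
//   if (llvm::Error error = engine->invoke("foo", args))
//     /* ...handle the error... */;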
Error ExecutionEngine::invoke(StringRef name, MutableArrayRef<void *> args) {
  auto expectedFPtr = lookup(name);
  if (!expectedFPtr)
    return expectedFPtr.takeError();
  auto fptr = *expectedFPtr;

  (*fptr)(args.data());

  return Error::success();
}