//===- ExecutionEngine.cpp - MLIR Execution engine and utils --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the execution engine for MLIR modules based on the LLVM
// ORC JIT engine.
//
//===----------------------------------------------------------------------===//

#include "mlir/ExecutionEngine/ExecutionEngine.h"
#include "mlir/Dialect/LLVMIR/LLVMDialect.h"
#include "mlir/IR/BuiltinOps.h"
#include "mlir/Support/FileUtilities.h"
#include "mlir/Target/LLVMIR/Export.h"

#include "llvm/ExecutionEngine/JITEventListener.h"
#include "llvm/ExecutionEngine/ObjectCache.h"
#include "llvm/ExecutionEngine/Orc/CompileUtils.h"
#include "llvm/ExecutionEngine/Orc/ExecutionUtils.h"
#include "llvm/ExecutionEngine/Orc/IRCompileLayer.h"
#include "llvm/ExecutionEngine/Orc/IRTransformLayer.h"
#include "llvm/ExecutionEngine/Orc/JITTargetMachineBuilder.h"
#include "llvm/ExecutionEngine/Orc/RTDyldObjectLinkingLayer.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/MC/SubtargetFeature.h"
#include "llvm/MC/TargetRegistry.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/Host.h"
#include "llvm/Support/ToolOutputFile.h"

#define DEBUG_TYPE "execution-engine"

using namespace mlir;
using llvm::dbgs;
using llvm::Error;
using llvm::errs;
using llvm::Expected;
using llvm::LLVMContext;
using llvm::MemoryBuffer;
using llvm::MemoryBufferRef;
using llvm::Module;
using llvm::SectionMemoryManager;
using llvm::StringError;
using llvm::Triple;
using llvm::orc::DynamicLibrarySearchGenerator;
using llvm::orc::ExecutionSession;
using llvm::orc::IRCompileLayer;
using llvm::orc::JITTargetMachineBuilder;
using llvm::orc::MangleAndInterner;
using llvm::orc::RTDyldObjectLinkingLayer;
using llvm::orc::SymbolMap;
using llvm::orc::ThreadSafeModule;
using llvm::orc::TMOwningSimpleCompiler;
/// Wrap a string into an llvm::StringError.
static Error makeStringError(const Twine &message) {
  return llvm::make_error<StringError>(message.str(),
                                       llvm::inconvertibleErrorCode());
}

void SimpleObjectCache::notifyObjectCompiled(const Module *m,
                                             MemoryBufferRef objBuffer) {
  cachedObjects[m->getModuleIdentifier()] = MemoryBuffer::getMemBufferCopy(
      objBuffer.getBuffer(), objBuffer.getBufferIdentifier());
}

std::unique_ptr<MemoryBuffer> SimpleObjectCache::getObject(const Module *m) {
  auto i = cachedObjects.find(m->getModuleIdentifier());
  if (i == cachedObjects.end()) {
    LLVM_DEBUG(dbgs() << "No object for " << m->getModuleIdentifier()
                      << " in cache. Compiling.\n");
    return nullptr;
  }
  LLVM_DEBUG(dbgs() << "Object for " << m->getModuleIdentifier()
                    << " loaded from cache.\n");
  return MemoryBuffer::getMemBuffer(i->second->getMemBufferRef());
}

void SimpleObjectCache::dumpToObjectFile(StringRef outputFilename) {
  // Set up the output file.
  std::string errorMessage;
  auto file = openOutputFile(outputFilename, &errorMessage);
  if (!file) {
    llvm::errs() << errorMessage << "\n";
    return;
  }

  // Dump the object generated for a single module to the output file.
  assert(cachedObjects.size() == 1 && "Expected only one object entry.");
  auto &cachedObject = cachedObjects.begin()->second;
  file->os() << cachedObject->getBuffer();
  file->keep();
}

void ExecutionEngine::dumpToObjectFile(StringRef filename) {
  if (cache == nullptr) {
    llvm::errs() << "cannot dump ExecutionEngine object code to file: "
                    "object cache is disabled\n";
    return;
  }
  cache->dumpToObjectFile(filename);
}

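// Expose host symbols to JIT'ed code. For illustration only, a client could
// register a hypothetical host function `hostPrint` roughly as follows (the
// exact symbol value type stored in the SymbolMap depends on the LLVM version
// in use):
//
//   engine->registerSymbols([&](llvm::orc::MangleAndInterner interner) {
//     llvm::orc::SymbolMap symbolMap;
//     symbolMap[interner("hostPrint")] =
//         llvm::JITEvaluatedSymbol::fromPointer(&hostPrint);
//     return symbolMap;
//   });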
void ExecutionEngine::registerSymbols(
    llvm::function_ref<SymbolMap(MangleAndInterner)> symbolMap) {
  auto &mainJitDylib = jit->getMainJITDylib();
  cantFail(mainJitDylib.define(
      absoluteSymbols(symbolMap(llvm::orc::MangleAndInterner(
          mainJitDylib.getExecutionSession(), jit->getDataLayout())))));
}

// Set up the LLVM target triple from the current machine.
bool ExecutionEngine::setupTargetTriple(Module *llvmModule) {
  // Set up the machine properties from the current architecture.
  auto targetTriple = llvm::sys::getDefaultTargetTriple();
  std::string errorMessage;
  const auto *target =
      llvm::TargetRegistry::lookupTarget(targetTriple, errorMessage);
  if (!target) {
    errs() << "NO target: " << errorMessage << "\n";
    return true;
  }

  std::string cpu(llvm::sys::getHostCPUName());
  llvm::SubtargetFeatures features;
  llvm::StringMap<bool> hostFeatures;

  if (llvm::sys::getHostCPUFeatures(hostFeatures))
    for (auto &f : hostFeatures)
      features.AddFeature(f.first(), f.second);

  std::unique_ptr<llvm::TargetMachine> machine(target->createTargetMachine(
      targetTriple, cpu, features.getString(), {}, {}));
  if (!machine) {
    errs() << "Unable to create target machine\n";
    return true;
  }
  llvmModule->setDataLayout(machine->createDataLayout());
  llvmModule->setTargetTriple(targetTriple);
  return false;
}

static std::string makePackedFunctionName(StringRef name) {
  return "_mlir_" + name.str();
}

// For each function in the LLVM module, define an interface function that wraps
// all the arguments of the original function and all its results into an i8**
// pointer to provide a unified invocation interface.
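//
// For illustration, given a definition `int32_t foo(int32_t)`, the generated
// wrapper `_mlir_foo` behaves like the following C-style pseudocode (the
// function name `foo` is illustrative only):
//
//   void _mlir_foo(void **args) {
//     int32_t arg0 = *static_cast<int32_t *>(args[0]); // load argument #0
//     int32_t result = foo(arg0);                      // call the original
//     *static_cast<int32_t *>(args[1]) = result;       // store the result
//   }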
static void packFunctionArguments(Module *module) {
  auto &ctx = module->getContext();
  llvm::IRBuilder<> builder(ctx);
  DenseSet<llvm::Function *> interfaceFunctions;
  for (auto &func : module->getFunctionList()) {
    if (func.isDeclaration()) {
      continue;
    }
    if (interfaceFunctions.count(&func)) {
      continue;
    }

    // Given a function `foo(<...>)`, define the interface function
    // `_mlir_foo(i8**)`.
    auto *newType = llvm::FunctionType::get(
        builder.getVoidTy(), builder.getInt8PtrTy()->getPointerTo(),
        /*isVarArg=*/false);
    auto newName = makePackedFunctionName(func.getName());
    auto funcCst = module->getOrInsertFunction(newName, newType);
    llvm::Function *interfaceFunc = cast<llvm::Function>(funcCst.getCallee());
    interfaceFunctions.insert(interfaceFunc);

    // Extract the arguments from the type-erased argument list and cast them to
    // the proper types.
    auto *bb = llvm::BasicBlock::Create(ctx);
    bb->insertInto(interfaceFunc);
    builder.SetInsertPoint(bb);
    llvm::Value *argList = interfaceFunc->arg_begin();
    SmallVector<llvm::Value *, 8> args;
    args.reserve(llvm::size(func.args()));
    for (auto &indexedArg : llvm::enumerate(func.args())) {
      llvm::Value *argIndex = llvm::Constant::getIntegerValue(
          builder.getInt64Ty(), APInt(64, indexedArg.index()));
      llvm::Value *argPtrPtr =
          builder.CreateGEP(builder.getInt8PtrTy(), argList, argIndex);
      llvm::Value *argPtr =
          builder.CreateLoad(builder.getInt8PtrTy(), argPtrPtr);
      llvm::Type *argTy = indexedArg.value().getType();
      argPtr = builder.CreateBitCast(argPtr, argTy->getPointerTo());
      llvm::Value *arg = builder.CreateLoad(argTy, argPtr);
      args.push_back(arg);
    }

    // Call the implementation function with the extracted arguments.
    llvm::Value *result = builder.CreateCall(&func, args);

    // Assuming the result is one value, potentially of type `void`.
    if (!result->getType()->isVoidTy()) {
      llvm::Value *retIndex = llvm::Constant::getIntegerValue(
          builder.getInt64Ty(), APInt(64, llvm::size(func.args())));
      llvm::Value *retPtrPtr =
          builder.CreateGEP(builder.getInt8PtrTy(), argList, retIndex);
      llvm::Value *retPtr =
          builder.CreateLoad(builder.getInt8PtrTy(), retPtrPtr);
      retPtr = builder.CreateBitCast(retPtr, result->getType()->getPointerTo());
      builder.CreateStore(result, retPtr);
    }

    // The interface function returns void.
    builder.CreateRetVoid();
  }
}

ExecutionEngine::ExecutionEngine(bool enableObjectCache,
                                 bool enableGDBNotificationListener,
                                 bool enablePerfNotificationListener)
    : cache(enableObjectCache ? new SimpleObjectCache() : nullptr),
      gdbListener(enableGDBNotificationListener
                      ? llvm::JITEventListener::createGDBRegistrationListener()
                      : nullptr),
      perfListener(nullptr) {
  if (enablePerfNotificationListener) {
    if (auto *listener = llvm::JITEventListener::createPerfJITEventListener())
      perfListener = listener;
    else if (auto *listener =
                 llvm::JITEventListener::createIntelJITEventListener())
      perfListener = listener;
  }
}

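// Example (illustrative): creating an engine for a module that has already
// been lowered to the LLVM dialect, then running a packed entry point. The
// function name `entry` and the `packedArgs` array are hypothetical; see
// invokePacked below for the packing convention.
//
//   auto expectedEngine = ExecutionEngine::create(module, /*options=*/{});
//   if (!expectedEngine)
//     return expectedEngine.takeError();
//   auto engine = std::move(*expectedEngine);
//   if (llvm::Error error = engine->invokePacked("entry", packedArgs))
//     return error;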
Expected<std::unique_ptr<ExecutionEngine>>
ExecutionEngine::create(ModuleOp m, const ExecutionEngineOptions &options) {
  auto engine = std::make_unique<ExecutionEngine>(
      options.enableObjectCache, options.enableGDBNotificationListener,
      options.enablePerfNotificationListener);

  std::unique_ptr<llvm::LLVMContext> ctx(new llvm::LLVMContext);
  auto llvmModule = options.llvmModuleBuilder
                        ? options.llvmModuleBuilder(m, *ctx)
                        : translateModuleToLLVMIR(m, *ctx);
  if (!llvmModule)
    return makeStringError("could not convert to LLVM IR");
  // FIXME: the triple should be passed to the translation or dialect conversion
  // instead of this.  Currently, the LLVM module created above has no triple
  // associated with it.
  setupTargetTriple(llvmModule.get());
  packFunctionArguments(llvmModule.get());

  auto dataLayout = llvmModule->getDataLayout();

  // Callback to create the object layer with symbol resolution to the current
  // process and dynamically linked libraries.
  auto objectLinkingLayerCreator = [&](ExecutionSession &session,
                                       const Triple &tt) {
    auto objectLayer = std::make_unique<RTDyldObjectLinkingLayer>(
        session, [sectionMemoryMapper = options.sectionMemoryMapper]() {
          return std::make_unique<SectionMemoryManager>(sectionMemoryMapper);
        });

    // Register JIT event listeners if they are enabled.
    if (engine->gdbListener)
      objectLayer->registerJITEventListener(*engine->gdbListener);
    if (engine->perfListener)
      objectLayer->registerJITEventListener(*engine->perfListener);

    // COFF format binaries (Windows) need special handling to deal with
    // exported symbol visibility.
    // cf llvm/lib/ExecutionEngine/Orc/LLJIT.cpp LLJIT::createObjectLinkingLayer
    llvm::Triple targetTriple(llvm::Twine(llvmModule->getTargetTriple()));
    if (targetTriple.isOSBinFormatCOFF()) {
      objectLayer->setOverrideObjectFlagsWithResponsibilityFlags(true);
      objectLayer->setAutoClaimResponsibilityForObjectSymbols(true);
    }

    // Resolve symbols from shared libraries.
    for (auto libPath : options.sharedLibPaths) {
      auto mb = llvm::MemoryBuffer::getFile(libPath);
      if (!mb) {
        errs() << "Failed to create MemoryBuffer for: " << libPath
               << "\nError: " << mb.getError().message() << "\n";
        continue;
      }
      auto &jd = session.createBareJITDylib(std::string(libPath));
      auto loaded = DynamicLibrarySearchGenerator::Load(
          libPath.data(), dataLayout.getGlobalPrefix());
      if (!loaded) {
        errs() << "Could not load " << libPath << ":\n  " << loaded.takeError()
               << "\n";
        continue;
      }
      jd.addGenerator(std::move(*loaded));
      cantFail(objectLayer->add(jd, std::move(mb.get())));
    }

    return objectLayer;
  };

  // Callback to inspect the cache and recompile on demand. This follows Lang's
  // LLJITWithObjectCache example.
  auto compileFunctionCreator = [&](JITTargetMachineBuilder jtmb)
      -> Expected<std::unique_ptr<IRCompileLayer::IRCompiler>> {
    if (options.jitCodeGenOptLevel)
      jtmb.setCodeGenOptLevel(*options.jitCodeGenOptLevel);
    auto tm = jtmb.createTargetMachine();
    if (!tm)
      return tm.takeError();
    return std::make_unique<TMOwningSimpleCompiler>(std::move(*tm),
                                                    engine->cache.get());
  };

  // Create the LLJIT by calling the LLJITBuilder with the two callbacks.
  auto jit =
      cantFail(llvm::orc::LLJITBuilder()
                   .setCompileFunctionCreator(compileFunctionCreator)
                   .setObjectLinkingLayerCreator(objectLinkingLayerCreator)
                   .create());

  // Add a ThreadSafeModule to the engine and return.
  ThreadSafeModule tsm(std::move(llvmModule), std::move(ctx));
  if (options.transformer)
    cantFail(tsm.withModuleDo(
        [&](llvm::Module &module) { return options.transformer(&module); }));
  cantFail(jit->addIRModule(std::move(tsm)));
  engine->jit = std::move(jit);

  // Resolve symbols that are statically linked in the current process.
  llvm::orc::JITDylib &mainJD = engine->jit->getMainJITDylib();
  mainJD.addGenerator(
      cantFail(DynamicLibrarySearchGenerator::GetForCurrentProcess(
          dataLayout.getGlobalPrefix())));

  return std::move(engine);
}

Expected<void (*)(void **)>
ExecutionEngine::lookupPacked(StringRef name) const {
  auto result = lookup(makePackedFunctionName(name));
  if (!result)
    return result.takeError();
  return reinterpret_cast<void (*)(void **)>(result.get());
}

Expected<void *> ExecutionEngine::lookup(StringRef name) const {
  auto expectedSymbol = jit->lookup(name);

  // JIT lookup may return an Error referring to strings stored internally by
  // the JIT. If the Error outlives the ExecutionEngine, it would have a
  // dangling reference, which is currently caught by an assertion inside the
  // JIT thanks to hand-rolled reference counting. Rewrap the error message
  // into a string before returning. Alternatively, ORC JIT should consider
  // copying the string into the error message.
  if (!expectedSymbol) {
    std::string errorMessage;
    llvm::raw_string_ostream os(errorMessage);
    llvm::handleAllErrors(expectedSymbol.takeError(),
                          [&os](llvm::ErrorInfoBase &ei) { ei.log(os); });
    return makeStringError(os.str());
  }

  if (void *fptr = expectedSymbol->toPtr<void *>())
    return fptr;
  return makeStringError("looked up function is null");
}

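// Invoke a function through the packed interface generated by
// packFunctionArguments: `args` holds a pointer to each argument, followed by
// a pointer to storage for the result (if any). For illustration only, calling
// a hypothetical `int32_t foo(int32_t)` looks roughly like this:
//
//   int32_t input = 42;
//   int32_t output = 0;
//   void *packedArgs[] = {&input, &output};
//   if (llvm::Error error = engine->invokePacked("foo", packedArgs))
//     ... // handle the error.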
Error ExecutionEngine::invokePacked(StringRef name,
                                    MutableArrayRef<void *> args) {
  auto expectedFPtr = lookupPacked(name);
  if (!expectedFPtr)
    return expectedFPtr.takeError();
  auto fptr = *expectedFPtr;

  (*fptr)(args.data());

  return Error::success();
}