//==-- llvm/Support/ThreadPool.cpp - A ThreadPool implementation -*- C++ -*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a crude C++11-based thread pool.
//
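// A minimal usage sketch (the public interface, including async(), is
// declared in ThreadPool.h):
//
//   ThreadPool Pool;                       // one worker per hardware thread
//   auto Future = Pool.async([] { /* some work */ });
//   Future.wait();                         // wait for this one task...
//   Pool.wait();                           // ...or for every queued task
//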
//===----------------------------------------------------------------------===//

#include "llvm/Support/ThreadPool.h"

#include "llvm/Config/llvm-config.h"
#include "llvm/Support/Threading.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#if LLVM_ENABLE_THREADS

// Default to hardware_concurrency()
ThreadPool::ThreadPool() : ThreadPool(hardware_concurrency()) {}

ThreadPool::ThreadPool(unsigned ThreadCount)
    : ActiveThreads(0), EnableFlag(true) {
  // Create ThreadCount threads that loop forever, waiting on QueueCondition
  // for tasks to be queued or for the pool to be destroyed.
  Threads.reserve(ThreadCount);
  for (unsigned ThreadID = 0; ThreadID < ThreadCount; ++ThreadID) {
    Threads.emplace_back([&] {
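      // Worker loop: repeatedly take one task off the shared queue and run it,
      // exiting once the pool is shutting down and the queue has drained.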
      while (true) {
        std::unique_ptr<TaskBase> Task;
        {
          std::unique_lock<std::mutex> LockGuard(QueueLock);
          // Wait for tasks to be pushed into the queue
          QueueCondition.wait(LockGuard,
                              [&] { return !EnableFlag || !Tasks.empty(); });
          // Exit condition
          if (!EnableFlag && Tasks.empty())
            return;
          // We have a task: grab it and release the lock on the queue

          // Mark this thread as active before popping the queue, so that
          // wait() still sees a task in flight even when the queue is empty
          {
            std::unique_lock<std::mutex> LockGuard(CompletionLock);
            ++ActiveThreads;
          }
          Task = std::move(Tasks.front());
          Tasks.pop();
        }
        // Run the task we just grabbed; the queue lock is no longer held
        Task->execute();

        {
          // Adjust `ActiveThreads`, in case someone waits on ThreadPool::wait()
          std::unique_lock<std::mutex> LockGuard(CompletionLock);
          --ActiveThreads;
        }

        // Notify task completion, in case someone waits on ThreadPool::wait()
        CompletionCondition.notify_all();
      }
    });
  }
}

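// Blocking wait: returns once no worker is marked active and the task queue
// is empty.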
void ThreadPool::wait() {
  // Wait for all threads to complete and the queue to be empty
  std::unique_lock<std::mutex> LockGuard(CompletionLock);
  // The order of the checks for ActiveThreads and Tasks.empty() matters because
  // any active threads might be modifying the Tasks queue, and this would be a
  // race.
  CompletionCondition.wait(LockGuard,
                           [&] { return !ActiveThreads && Tasks.empty(); });
}

// The destructor joins all threads, waiting for completion.
ThreadPool::~ThreadPool() {
  {
    std::unique_lock<std::mutex> LockGuard(QueueLock);
    EnableFlag = false;
  }
  QueueCondition.notify_all();
  for (auto &Worker : Threads)
    Worker.join();
}

#else // LLVM_ENABLE_THREADS Disabled

ThreadPool::ThreadPool() : ThreadPool(0) {}

// No threads are launched; issue a warning if ThreadCount is not 0
ThreadPool::ThreadPool(unsigned ThreadCount)
    : ActiveThreads(0) {
  if (ThreadCount) {
    errs() << "Warning: requested a ThreadPool with " << ThreadCount
           << " threads, but LLVM_ENABLE_THREADS has been turned off\n";
  }
}

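// With LLVM_ENABLE_THREADS off there are no worker threads; wait() simply runs
// every queued task inline, in FIFO order.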
void ThreadPool::wait() {
  // Sequential implementation running the tasks
  while (!Tasks.empty()) {
    auto Task = std::move(Tasks.front());
    Tasks.pop();
    Task();
  }
}

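// Queue a task without running it. Because the future uses
// std::launch::deferred, the task only executes once the future is waited on,
// either through the wrapper that wait() invokes or directly by the caller.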
std::shared_future<void> ThreadPool::asyncImpl(TaskTy Task) {
  // Get a future with std::launch::deferred execution using std::async
  auto Future = std::async(std::launch::deferred, std::move(Task)).share();
  // Wrap the future so that ThreadPool::wait() can run the task and the
  // returned future can still be waited on
  PackagedTaskTy PackagedTask([Future]() { Future.get(); });
  Tasks.push(std::move(PackagedTask));
  return Future;
}

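// The destructor drains the queue, running any remaining deferred tasks.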
ThreadPool::~ThreadPool() {
  wait();
}

#endif