#include <mbgl/actor/scheduler.hpp>
#include <mbgl/actor/mailbox.hpp>
#include <mbgl/util/platform.hpp>
#include <mbgl/util/string.hpp>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>
#include <vector>

namespace mbgl {
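
// A fixed-size pool of worker threads that implements the actor Scheduler
// interface: scheduled mailboxes are pushed onto a shared queue and drained
// by the workers.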
class ThreadPool final : public Scheduler {
public:
    explicit ThreadPool(std::size_t count);
    ~ThreadPool() override;

    void schedule(std::weak_ptr<Mailbox>) override;

private:
    std::vector<std::thread> threads;
    std::queue<std::weak_ptr<Mailbox>> queue;
    std::mutex mutex;
    std::condition_variable cv;
    bool terminate { false };
};

ThreadPool::ThreadPool(std::size_t count) {
    threads.reserve(count);
    for (std::size_t i = 0; i < count; ++i) {
        threads.emplace_back([this, i]() {
            platform::setCurrentThreadName(std::string{ "Worker " } + util::toString(i + 1));

            while (true) {
                std::unique_lock<std::mutex> lock(mutex);

                // Sleep until there is either work in the queue or the pool
                // is shutting down.
                cv.wait(lock, [this] {
                    return !queue.empty() || terminate;
                });

                if (terminate) {
                    return;
                }

                auto mailbox = queue.front();
                queue.pop();

                // Release the lock before delivering the message so that
                // other workers can dequeue concurrently.
                lock.unlock();

                Mailbox::maybeReceive(mailbox);
            }
        });
    }
}
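
// Note: on destruction, workers exit as soon as they observe the terminate
// flag; any mailboxes still queued are dropped rather than drained. They are
// held as weak_ptrs, so dropping them is safe.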
ThreadPool::~ThreadPool() {
    {
        std::lock_guard<std::mutex> lock(mutex);
        terminate = true;
    }

    // Wake every worker so each one observes the terminate flag and exits.
    cv.notify_all();

    for (auto& thread : threads) {
        thread.join();
    }
}

void ThreadPool::schedule(std::weak_ptr<Mailbox> mailbox) {
    {
        std::lock_guard<std::mutex> lock(mutex);
        queue.push(std::move(mailbox));
    }

    // Wake a single worker to pick up the newly queued mailbox.
    cv.notify_one();
}
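
// Process-wide background scheduler, created lazily on first use and shared
// by all callers.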
Scheduler& Scheduler::GetBackground() {
    static auto pool = std::make_unique<ThreadPool>(4);
    return *pool;
}
} // namespace mbgl
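
// Usage sketch (illustrative only; the Mailbox constructor signature is
// assumed from the actor headers included above, not defined in this file):
//
//     auto& scheduler = mbgl::Scheduler::GetBackground();
//     auto mailbox = std::make_shared<mbgl::Mailbox>(scheduler);
//     // Messages pushed to the mailbox trigger schedule(), and a worker
//     // thread then delivers them via Mailbox::maybeReceive().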