/*
    Copyright (c) 2005-2020 Intel Corporation

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
*/

#ifndef __TBB_cache_aligned_allocator_H
#define __TBB_cache_aligned_allocator_H

#include "detail/_utils.h"
#include "detail/_namespace_injection.h"
#include <cstdlib>
#include <utility>

#if __TBB_CPP17_MEMORY_RESOURCE_PRESENT
#include <memory_resource>
#endif

namespace tbb {
namespace detail {

namespace r1 {
void*       __TBB_EXPORTED_FUNC cache_aligned_allocate(std::size_t size);
void        __TBB_EXPORTED_FUNC cache_aligned_deallocate(void* p);
std::size_t __TBB_EXPORTED_FUNC cache_line_size();
}

namespace d1 {

template<typename T>
class cache_aligned_allocator {
public:
    using value_type = T;
    using propagate_on_container_move_assignment = std::true_type;

    //! Always defined for TBB containers (supported since C++17 for std containers)
    using is_always_equal = std::true_type;

    cache_aligned_allocator() = default;
    template<typename U> cache_aligned_allocator(const cache_aligned_allocator<U>&) noexcept {}

    //! Allocate space for n objects, starting on a cache/sector line.
    T* allocate(std::size_t n) {
        return static_cast<T*>(r1::cache_aligned_allocate(n * sizeof(value_type)));
    }

    //! Free a block of memory that starts on a cache line.
    void deallocate(T* p, std::size_t) {
        r1::cache_aligned_deallocate(p);
    }

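    // Note (explanatory): subtracting cache_line_size() below leaves headroom
    // for the alignment padding that the underlying cache_aligned_allocate may
    // add, so the byte count implied by allocate(max_size()) stays representable.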
    //! Largest value for which a call to allocate might succeed.
    std::size_t max_size() const noexcept {
        return (~std::size_t(0) - r1::cache_line_size()) / sizeof(value_type);
    }

#if TBB_ALLOCATOR_TRAITS_BROKEN
    using pointer = value_type*;
    using const_pointer = const value_type*;
    using reference = value_type&;
    using const_reference = const value_type&;
    using difference_type = std::ptrdiff_t;
    using size_type = std::size_t;
    template<typename U> struct rebind {
        using other = cache_aligned_allocator<U>;
    };
    template<typename U, typename... Args>
    void construct(U *p, Args&&... args)
        { ::new (p) U(std::forward<Args>(args)...); }
    void destroy(pointer p) { p->~value_type(); }
    pointer address(reference x) const { return &x; }
    const_pointer address(const_reference x) const { return &x; }
#endif // TBB_ALLOCATOR_TRAITS_BROKEN
};
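
//! A minimal usage sketch (illustrative): the allocator plugs into standard
//! containers so that element storage starts on a cache line, which helps
//! avoid false sharing between neighboring allocations.
//! \code
//!     std::vector<double, tbb::cache_aligned_allocator<double>> counters(64);
//!     // counters.data() is aligned on a cache-line boundary
//! \endcode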

#if TBB_ALLOCATOR_TRAITS_BROKEN
    template<>
    class cache_aligned_allocator<void> {
    public:
        using pointer = void*;
        using const_pointer = const void*;
        using value_type = void;
        template<typename U> struct rebind {
            using other = cache_aligned_allocator<U>;
        };
    };
#endif

template<typename T, typename U>
bool operator==(const cache_aligned_allocator<T>&, const cache_aligned_allocator<U>&) noexcept { return true; }

template<typename T, typename U>
bool operator!=(const cache_aligned_allocator<T>&, const cache_aligned_allocator<U>&) noexcept { return false; }

#if __TBB_CPP17_MEMORY_RESOURCE_PRESENT

//! C++17 memory resource wrapper to ensure cache-line-size alignment
class cache_aligned_resource : public std::pmr::memory_resource {
public:
    cache_aligned_resource() : cache_aligned_resource(std::pmr::get_default_resource()) {}
    explicit cache_aligned_resource(std::pmr::memory_resource* upstream) : m_upstream(upstream) {}

    std::pmr::memory_resource* upstream_resource() const {
        return m_upstream;
    }

private:
    //! We don't know which memory resource is set as upstream, so use padding to guarantee alignment.
    void* do_allocate(std::size_t bytes, std::size_t alignment) override {
        // TODO: make it common with tbb_allocator.cpp
        std::size_t cache_line_alignment = correct_alignment(alignment);
        std::size_t space = correct_size(bytes) + cache_line_alignment;
        std::uintptr_t base = reinterpret_cast<std::uintptr_t>(m_upstream->allocate(space));
        __TBB_ASSERT(base != 0, "Upstream resource returned NULL.");

        // Round up to the next aligned boundary (always past base, leaving room for the header).
        std::uintptr_t result = (base + cache_line_alignment) & ~(cache_line_alignment - 1);
        __TBB_ASSERT((result - base) >= sizeof(std::uintptr_t), "Can't store the base pointer in the header");
        __TBB_ASSERT(space - (result - base) >= bytes, "Not enough space for the storage");

        // Record where the block actually starts.
        (reinterpret_cast<std::uintptr_t*>(result))[-1] = base;
        return reinterpret_cast<void*>(result);
    }
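
    // Block layout produced above (illustrative):
    //
    //   base                            result              result + bytes
    //   |--- padding ------------------>|--- user data ---->|
    //
    // The padding is at least sizeof(std::uintptr_t) bytes, so the original
    // base address can be stashed at result[-1] and recovered by
    // do_deallocate() below.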

    void do_deallocate(void* ptr, std::size_t bytes, std::size_t alignment) override {
        if (ptr) {
            // Recover where the block actually starts.
            std::uintptr_t base = (reinterpret_cast<std::uintptr_t*>(ptr))[-1];
            m_upstream->deallocate(reinterpret_cast<void*>(base), correct_size(bytes) + correct_alignment(alignment));
        }
    }

    bool do_is_equal(const std::pmr::memory_resource& other) const noexcept override {
        if (this == &other) { return true; }
#if __TBB_USE_OPTIONAL_RTTI
        const cache_aligned_resource* other_res = dynamic_cast<const cache_aligned_resource*>(&other);
        return other_res && (upstream_resource() == other_res->upstream_resource());
#else
        return false;
#endif
    }

    std::size_t correct_alignment(std::size_t alignment) {
        __TBB_ASSERT(tbb::detail::is_power_of_two(alignment), "Alignment is not a power of 2");
#if __TBB_CPP17_HW_INTERFERENCE_SIZE_PRESENT
        std::size_t cache_line_size = std::hardware_destructive_interference_size;
#else
        std::size_t cache_line_size = r1::cache_line_size();
#endif
        return alignment < cache_line_size ? cache_line_size : alignment;
    }

    std::size_t correct_size(std::size_t bytes) {
        // Handle the case when a small size is requested: otherwise there
        // might not be enough space to store the original pointer.
        return bytes < sizeof(std::uintptr_t) ? sizeof(std::uintptr_t) : bytes;
    }

    std::pmr::memory_resource* m_upstream;
};
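
//! A minimal usage sketch (illustrative): wrap an upstream resource so that
//! pmr-aware containers receive cache-line-aligned memory. Variable names are
//! arbitrary.
//! \code
//!     tbb::cache_aligned_resource aligned_mr; // wraps std::pmr::get_default_resource()
//!     std::pmr::vector<int> v(&aligned_mr);   // allocations start on a cache line
//! \endcode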

#endif // __TBB_CPP17_MEMORY_RESOURCE_PRESENT

} // namespace d1
} // namespace detail

inline namespace v1 {
using detail::d1::cache_aligned_allocator;
#if __TBB_CPP17_MEMORY_RESOURCE_PRESENT
using detail::d1::cache_aligned_resource;
#endif
} // namespace v1
} // namespace tbb

#endif /* __TBB_cache_aligned_allocator_H */