1 //===- llvm/unittest/Support/AllocatorTest.cpp - BumpPtrAllocator tests ---===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 9 #include "llvm/Support/Memory.h" 10 #include "llvm/Support/Process.h" 11 #include "gtest/gtest.h" 12 #include <cassert> 13 #include <cstdlib> 14 15 #if defined(__NetBSD__) 16 // clang-format off 17 #include <sys/param.h> 18 #include <sys/types.h> 19 #include <sys/sysctl.h> 20 #include <err.h> 21 #include <unistd.h> 22 // clang-format on 23 #endif 24 25 using namespace llvm; 26 using namespace sys; 27 28 namespace { 29 30 bool IsMPROTECT() { 31 #if defined(__NetBSD__) 32 int mib[3]; 33 int paxflags; 34 size_t len = sizeof(paxflags); 35 36 mib[0] = CTL_PROC; 37 mib[1] = getpid(); 38 mib[2] = PROC_PID_PAXFLAGS; 39 40 if (sysctl(mib, 3, &paxflags, &len, NULL, 0) != 0) 41 err(EXIT_FAILURE, "sysctl"); 42 43 return !!(paxflags & CTL_PROC_PAXFLAGS_MPROTECT); 44 #elif defined(__APPLE__) && defined(__aarch64__) 45 return true; 46 #else 47 return false; 48 #endif 49 } 50 51 class MappedMemoryTest : public ::testing::TestWithParam<unsigned> { 52 public: 53 MappedMemoryTest() { 54 Flags = GetParam(); 55 PageSize = sys::Process::getPageSizeEstimate(); 56 } 57 58 protected: 59 // Adds RW flags to permit testing of the resulting memory 60 unsigned getTestableEquivalent(unsigned RequestedFlags) { 61 switch (RequestedFlags) { 62 case Memory::MF_READ: 63 case Memory::MF_WRITE: 64 case Memory::MF_READ|Memory::MF_WRITE: 65 return Memory::MF_READ|Memory::MF_WRITE; 66 case Memory::MF_READ|Memory::MF_EXEC: 67 case Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC: 68 case Memory::MF_EXEC: 69 return Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC; 70 } 71 // Default in case values are added to the enum, as 
required by some compilers 72 return Memory::MF_READ|Memory::MF_WRITE; 73 } 74 75 // Returns true if the memory blocks overlap 76 bool doesOverlap(MemoryBlock M1, MemoryBlock M2) { 77 if (M1.base() == M2.base()) 78 return true; 79 80 if (M1.base() > M2.base()) 81 return (unsigned char *)M2.base() + M2.allocatedSize() > M1.base(); 82 83 return (unsigned char *)M1.base() + M1.allocatedSize() > M2.base(); 84 } 85 86 unsigned Flags; 87 size_t PageSize; 88 }; 89 90 // MPROTECT prevents W+X mmaps 91 #define CHECK_UNSUPPORTED() \ 92 do { \ 93 if ((Flags & Memory::MF_WRITE) && (Flags & Memory::MF_EXEC) && \ 94 IsMPROTECT()) \ 95 return; \ 96 } while (0) 97 98 TEST_P(MappedMemoryTest, AllocAndRelease) { 99 CHECK_UNSUPPORTED(); 100 std::error_code EC; 101 MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,EC); 102 EXPECT_EQ(std::error_code(), EC); 103 104 EXPECT_NE((void*)nullptr, M1.base()); 105 EXPECT_LE(sizeof(int), M1.allocatedSize()); 106 107 EXPECT_FALSE(Memory::releaseMappedMemory(M1)); 108 } 109 110 TEST_P(MappedMemoryTest, AllocAndReleaseHuge) { 111 CHECK_UNSUPPORTED(); 112 std::error_code EC; 113 MemoryBlock M1 = Memory::allocateMappedMemory( 114 sizeof(int), nullptr, Flags | Memory::MF_HUGE_HINT, EC); 115 EXPECT_EQ(std::error_code(), EC); 116 117 // Test large/huge memory pages. In the worst case, 4kb pages should be 118 // returned, if large pages aren't available. 
119 120 EXPECT_NE((void *)nullptr, M1.base()); 121 EXPECT_LE(sizeof(int), M1.allocatedSize()); 122 123 EXPECT_FALSE(Memory::releaseMappedMemory(M1)); 124 } 125 126 TEST_P(MappedMemoryTest, MultipleAllocAndRelease) { 127 CHECK_UNSUPPORTED(); 128 std::error_code EC; 129 MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC); 130 EXPECT_EQ(std::error_code(), EC); 131 MemoryBlock M2 = Memory::allocateMappedMemory(64, nullptr, Flags, EC); 132 EXPECT_EQ(std::error_code(), EC); 133 MemoryBlock M3 = Memory::allocateMappedMemory(32, nullptr, Flags, EC); 134 EXPECT_EQ(std::error_code(), EC); 135 136 EXPECT_NE((void*)nullptr, M1.base()); 137 EXPECT_LE(16U, M1.allocatedSize()); 138 EXPECT_NE((void*)nullptr, M2.base()); 139 EXPECT_LE(64U, M2.allocatedSize()); 140 EXPECT_NE((void*)nullptr, M3.base()); 141 EXPECT_LE(32U, M3.allocatedSize()); 142 143 EXPECT_FALSE(doesOverlap(M1, M2)); 144 EXPECT_FALSE(doesOverlap(M2, M3)); 145 EXPECT_FALSE(doesOverlap(M1, M3)); 146 147 EXPECT_FALSE(Memory::releaseMappedMemory(M1)); 148 EXPECT_FALSE(Memory::releaseMappedMemory(M3)); 149 MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC); 150 EXPECT_EQ(std::error_code(), EC); 151 EXPECT_NE((void*)nullptr, M4.base()); 152 EXPECT_LE(16U, M4.allocatedSize()); 153 EXPECT_FALSE(Memory::releaseMappedMemory(M4)); 154 EXPECT_FALSE(Memory::releaseMappedMemory(M2)); 155 } 156 157 TEST_P(MappedMemoryTest, BasicWrite) { 158 // This test applies only to readable and writeable combinations 159 if (Flags && 160 !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE))) 161 return; 162 CHECK_UNSUPPORTED(); 163 164 std::error_code EC; 165 MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,EC); 166 EXPECT_EQ(std::error_code(), EC); 167 168 EXPECT_NE((void*)nullptr, M1.base()); 169 EXPECT_LE(sizeof(int), M1.allocatedSize()); 170 171 int *a = (int*)M1.base(); 172 *a = 1; 173 EXPECT_EQ(1, *a); 174 175 EXPECT_FALSE(Memory::releaseMappedMemory(M1)); 176 } 177 178 
// Writes to several independent RW blocks must not interfere with each other.
TEST_P(MappedMemoryTest, MultipleWrite) {
  // This test applies only to readable and writeable combinations
  if (Flags &&
      !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
    return;
  CHECK_UNSUPPORTED();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(1U * sizeof(int), M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.allocatedSize());

  int *x = (int*)M1.base();
  *x = 1;

  int *y = (int*)M2.base();
  for (int i = 0; i < 8; i++) {
    y[i] = i;
  }

  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));

  // Allocate a fresh block after two releases; it must also be writable.
  MemoryBlock M4 = Memory::allocateMappedMemory(64 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(64U * sizeof(int), M4.allocatedSize());
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));

  // Verify that M2 remains unaffected by other activity
  for (int i = 0; i < 8; i++) {
    EXPECT_EQ(i, y[i]);
  }
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

// protectMappedMemory can upgrade blocks to a testable (RW-capable)
// protection, after which they must be readable and writable.
TEST_P(MappedMemoryTest, EnabledWrite) {
  // MPROTECT prevents W+X, and since this test always adds W we need
  // to block any variant with X.
  if ((Flags & Memory::MF_EXEC) && IsMPROTECT())
    return;

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(2 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(2U * sizeof(int), M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.allocatedSize());

  EXPECT_FALSE(Memory::protectMappedMemory(M1, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M2, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M3, getTestableEquivalent(Flags)));

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  int *x = (int*)M1.base();
  *x = 1;
  int *y = (int*)M2.base();
  for (unsigned int i = 0; i < 8; i++) {
    y[i] = i;
  }
  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_EQ(6, y[6]);

  MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(16U, M4.allocatedSize());
  // Use EXPECT_FALSE on the returned error_code, consistent with the
  // protectMappedMemory checks on M1-M3 above.
  EXPECT_FALSE(Memory::protectMappedMemory(M4, getTestableEquivalent(Flags)));
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

// Allocations hinted "near" the previous block must still be valid and
// non-overlapping.
TEST_P(MappedMemoryTest, SuccessiveNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &M1, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &M2, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

// Re-using the same near-hint for several allocations must succeed; the hint
// is advisory, so no overlap check is meaningful here.
TEST_P(MappedMemoryTest, DuplicateNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(3*PageSize), 16);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

// A null near-hint block behaves like no hint at all.
TEST_P(MappedMemoryTest, ZeroNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near(nullptr, 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

// A zero-sized near-hint block with a non-null base is still a valid hint.
TEST_P(MappedMemoryTest, ZeroSizeNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(4*PageSize), 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.allocatedSize());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.allocatedSize());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.allocatedSize());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, UnalignedNear) { 408 CHECK_UNSUPPORTED(); 409 std::error_code EC; 410 MemoryBlock Near((void*)(2*PageSize+5), 0); 411 MemoryBlock M1 = Memory::allocateMappedMemory(15, &Near, Flags, EC); 412 EXPECT_EQ(std::error_code(), EC); 413 414 EXPECT_NE((void*)nullptr, M1.base()); 415 EXPECT_LE(sizeof(int), M1.allocatedSize()); 416 417 EXPECT_FALSE(Memory::releaseMappedMemory(M1)); 418 } 419 420 // Note that Memory::MF_WRITE is not supported exclusively across 421 // operating systems and architectures and can imply MF_READ|MF_WRITE 422 unsigned MemoryFlags[] = { 423 Memory::MF_READ, 424 Memory::MF_WRITE, 425 Memory::MF_READ|Memory::MF_WRITE, 426 Memory::MF_EXEC, 427 Memory::MF_READ|Memory::MF_EXEC, 428 Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC 429 }; 430 431 INSTANTIATE_TEST_SUITE_P(AllocationTests, MappedMemoryTest, 432 ::testing::ValuesIn(MemoryFlags)); 433 434 } // anonymous namespace 435