//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//

#include "FuzzerTracePC.h"
#include "FuzzerBuiltins.h"
#include "FuzzerBuiltinsMsvc.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTES_INTERFACE_TLS_INITIAL_EXEC uintptr_t __sancov_lowest_stack;

namespace fuzzer {

TracePC TPC;

size_t TracePC::GetTotalPCCoverage() {
  return ObservedPCs.size();
}


void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModules &&
      Modules[NumModules - 1].Start() == Start)
    return;
  assert(NumModules <
         sizeof(Modules) / sizeof(Modules[0]));
  auto &M = Modules[NumModules++];
  uint8_t *AlignedStart = RoundUpByPage(Start);
  uint8_t *AlignedStop = RoundDownByPage(Stop);
  size_t NumFullPages = AlignedStop > AlignedStart ?
                        (AlignedStop - AlignedStart) / PageSize() : 0;
  bool NeedFirst = Start < AlignedStart || !NumFullPages;
  bool NeedLast = Stop > AlignedStop && AlignedStop >= AlignedStart;
  M.NumRegions = NumFullPages + NeedFirst + NeedLast;
  assert(M.NumRegions > 0);
  M.Regions = new Module::Region[M.NumRegions];
  assert(M.Regions);
  size_t R = 0;
  if (NeedFirst)
    M.Regions[R++] = {Start, std::min(Stop, AlignedStart), true, false};
  for (uint8_t *P = AlignedStart; P < AlignedStop; P += PageSize())
    M.Regions[R++] = {P, P + PageSize(), true, true};
  if (NeedLast)
    M.Regions[R++] = {AlignedStop, Stop, true, false};
  assert(R == M.NumRegions);
  assert(M.Size() == (size_t)(Stop - Start));
  assert(M.Stop() == Stop);
  assert(M.Start() == Start);
  NumInline8bitCounters += M.Size();
}

// Mark all full page counter regions as PROT_NONE and set Enabled=false.
// The first time the instrumented code hits such a protected/disabled
// counter region we should catch a SEGV and call UnprotectLazyCounters,
// which will mark the page as PROT_READ|PROT_WRITE and set Enabled=true.
//
// Whenever other functions iterate over the counters they should ignore
// regions with Enabled=false.
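// Regions whose counters are never touched stay protected and disabled, and
// the counter iteration below (e.g. ClearInlineCounters, UpdateObservedPCs)
// simply skips them.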
void TracePC::ProtectLazyCounters() {
  size_t NumPagesProtected = 0;
  IterateCounterRegions([&](Module::Region &R) {
    if (!R.OneFullPage) return;
    if (Mprotect(R.Start, R.Stop - R.Start, false)) {
      R.Enabled = false;
      NumPagesProtected++;
    }
  });
  if (NumPagesProtected)
    Printf("INFO: %zd pages of counters were protected;"
           " libFuzzer's SEGV handler must be installed\n",
           NumPagesProtected);
}

bool TracePC::UnprotectLazyCounters(void *CounterPtr) {
  // Printf("UnprotectLazyCounters: %p\n", CounterPtr);
  if (!CounterPtr)
    return false;
  bool Done = false;
  uint8_t *Addr = reinterpret_cast<uint8_t *>(CounterPtr);
  IterateCounterRegions([&](Module::Region &R) {
    if (!R.OneFullPage || R.Enabled || Done) return;
    if (Addr >= R.Start && Addr < R.Stop)
      if (Mprotect(R.Start, R.Stop - R.Start, true)) {
        R.Enabled = true;
        Done = true;
      }
  });
  return Done;
}

void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

void TracePC::PrintModuleInfo() {
  if (NumModules) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModules, NumInline8bitCounters);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Size(), Modules[i].Start(),
             Modules[i].Stop());
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}

/// \return the address of the previous instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.h`
inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
#if defined(__arm__)
  // T32 (Thumb) branch instructions might be 16 or 32 bit long,
  // so we return (pc-2) in that case in order to be safe.
  // For A32 mode we return (pc-4) because all instructions are 32 bit long.
  return (PC - 3) & (~1);
#elif defined(__powerpc__) || defined(__powerpc64__) || defined(__aarch64__)
  // PCs are always 4 byte aligned.
  return PC - 4;
#elif defined(__sparc__) || defined(__mips__)
  return PC - 8;
#else
  return PC - 1;
#endif
}

/// \return the address of the next instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.cc`
inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
#if defined(__mips__)
  return PC + 8;
#elif defined(__powerpc__) || defined(__sparc__) || defined(__arm__) || \
    defined(__aarch64__)
  return PC + 4;
#else
  return PC + 1;
#endif
}

void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  auto ObservePC = [&](uintptr_t PC) {
    if (ObservedPCs.insert(PC).second && DoPrintNewPCs) {
      PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p", GetNextInstructionPc(PC));
      Printf("\n");
    }
  };

  auto Observe = [&](const PCTableEntry &TE) {
    if (TE.PCFlags & 1)
      if (++ObservedFuncs[TE.PC] == 1 && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE.PC);
    ObservePC(TE.PC);
  };

  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      for (size_t i = 0; i < NumModules; i++) {
        auto &M = Modules[i];
        assert(M.Size() ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t r = 0; r < M.NumRegions; r++) {
          auto &R = M.Regions[r];
          if (!R.Enabled) continue;
          for (uint8_t *P = R.Start; P < R.Stop; P++)
            if (*P)
              Observe(ModulePCTable[i].Start[M.Idx(P)]);
        }
      }
    }
  }

  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N;
       i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
    PrintPC("%p %F %L", "%p", GetNextInstructionPc(CoveredFuncs[i]));
    Printf("\n");
  }
}


static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = "";  // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

template<class CallBack>
void TracePC::IterateCoveredFunctions(CallBack CB) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto NextFE = M.Start; NextFE < M.Stop; ) {
      auto FE = NextFE;
      assert((FE->PCFlags & 1) && "Not a function entry point");
      do {
        NextFE++;
      } while (NextFE < M.Stop && !(NextFE->PCFlags & 1));
      CB(FE, NextFE, ObservedFuncs[FE->PC]);
    }
  }
}

void TracePC::SetFocusFunction(const std::string &FuncName) {
  // This function should be called once.
  assert(!FocusFunctionCounterPtr);
  if (FuncName.empty())
    return;
  for (size_t M = 0; M < NumModules; M++) {
    auto &PCTE = ModulePCTable[M];
    size_t N = PCTE.Stop - PCTE.Start;
    for (size_t I = 0; I < N; I++) {
      if (!(PCTE.Start[I].PCFlags & 1)) continue;  // not a function entry.
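      // DescribePC("%F", ...) typically yields "in <FunctionName>"; strip the
      // "in " prefix before comparing against the requested focus function.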
      auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
      if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
        Name = Name.substr(3, std::string::npos);
      if (FuncName != Name) continue;
      Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
      FocusFunctionCounterPtr = Modules[M].Start() + I;
      return;
    }
  }
}

bool TracePC::ObservedFocusFunction() {
  return FocusFunctionCounterPtr && *FocusFunctionCounterPtr;
}

void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  auto CoveredFunctionCallback = [&](const PCTableEntry *First,
                                     const PCTableEntry *Last,
                                     uintptr_t Counter) {
    assert(First < Last);
    auto VisualizePC = GetNextInstructionPc(First->PC);
    std::string FileStr = DescribePC("%s", VisualizePC);
    if (!IsInterestingCoverageFile(FileStr))
      return;
    std::string FunctionStr = DescribePC("%F", VisualizePC);
    if (FunctionStr.find("in ") == 0)
      FunctionStr = FunctionStr.substr(3);
    std::string LineStr = DescribePC("%l", VisualizePC);
    size_t Line = std::stoul(LineStr);
    size_t NumEdges = Last - First;
    Vector<uintptr_t> UncoveredPCs;
    for (auto TE = First; TE < Last; TE++)
      if (!ObservedPCs.count(TE->PC))
        UncoveredPCs.push_back(TE->PC);
    Printf("%sCOVERED_FUNC: hits: %zd", Counter ? "" : "UN", Counter);
    Printf(" edges: %zd/%zd", NumEdges - UncoveredPCs.size(), NumEdges);
    Printf(" %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(), Line);
    if (Counter)
      for (auto PC : UncoveredPCs)
        Printf("  UNCOVERED_PC: %s\n",
               DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
  };

  IterateCoveredFunctions(CoveredFunctionCallback);
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is a XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
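  // The hash is folded into the TORCW slot below so that different contents
  // with the same common-prefix length land in different entries.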
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}

template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  uint64_t HammingDistance = Popcountll(ArgXor);  // [0,64]
  uint64_t AbsoluteDistance = (Arg1 == Arg2 ? 0 : Clzll(Arg1 - Arg2) + 1);
  ValueProfileMap.AddValue(PC * 128 + HammingDistance);
  ValueProfileMap.AddValue(PC * 128 + 64 + AbsoluteDistance);
}

static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually be non-zero terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  IterateCounterRegions([](const Module::Region &R){
    if (R.Enabled)
      memset(R.Start, 0, R.Stop - R.Start);
  });
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack;  // Stack grows down
}

void WarnAboutDeprecatedInstrumentation(const char *flag) {
  // Use RawPrint because Printf cannot be used on Windows before OutputFile is
  // initialized.
  RawPrint(flag);
  RawPrint(
      " is no longer supported by libFuzzer.\n"
      "Please either migrate to a compiler that supports -fsanitize=fuzzer\n"
      "or use an older version of libFuzzer\n");
  exit(1);
}

}  // namespace fuzzer

extern "C" {
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  fuzzer::WarnAboutDeprecatedInstrumentation(
      "-fsanitize-coverage=trace-pc-guard");
}

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
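// As with trace-pc-guard above, the hook below only reports that this
// instrumentation mode is no longer supported and then exits.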
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  fuzzer::WarnAboutDeprecatedInstrumentation("-fsanitize-coverage=trace-pc");
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::WarnAboutDeprecatedInstrumentation(
      "-fsanitize-coverage=trace-pc-guard");
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
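  // The case constants themselves follow at Cases[2], in ascending order.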
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case: all switch values are small.
  // We may want to skip this at compile-time, but it will make the
  // instrumentation less general.
  if (Vals[N - 1] < 256)
    return;
  // Also skip small input values; they won't give a good signal.
  if (Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  size_t i;
  uint64_t Smaller = 0;
  uint64_t Larger = ~(uint64_t)0;
  // Find two switch values such that Smaller < Val < Larger.
  // Use 0 and 0xfff..f as the defaults.
  for (i = 0; i < N; i++) {
    if (Val < Vals[i]) {
      Larger = Vals[i];
      break;
    }
    if (Val > Vals[i]) Smaller = Vals[i];
  }

  // Apply HandleCmp to {Val,Smaller} and {Val, Larger},
  // use i as the PC modifier for HandleCmp.
  if (ValSizeInBits == 16) {
    fuzzer::TPC.HandleCmp(PC + 2 * i, static_cast<uint16_t>(Val),
                          (uint16_t)(Smaller));
    fuzzer::TPC.HandleCmp(PC + 2 * i + 1, static_cast<uint16_t>(Val),
                          (uint16_t)(Larger));
  } else if (ValSizeInBits == 32) {
    fuzzer::TPC.HandleCmp(PC + 2 * i, static_cast<uint32_t>(Val),
                          (uint32_t)(Smaller));
    fuzzer::TPC.HandleCmp(PC + 2 * i + 1, static_cast<uint32_t>(Val),
                          (uint32_t)(Larger));
  } else {
    fuzzer::TPC.HandleCmp(PC + 2*i, Val, Smaller);
    fuzzer::TPC.HandleCmp(PC + 2*i + 1, Val, Larger);
  }
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return;  // Not interesting.
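  // Record the contents and common-prefix length of the two strings,
  // stopping at the terminating zero.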
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
}  // extern "C"