//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//

#include "FuzzerTracePC.h"
#include "FuzzerBuiltins.h"
#include "FuzzerBuiltinsMsvc.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTES_INTERFACE_TLS_INITIAL_EXEC uintptr_t __sancov_lowest_stack;

namespace fuzzer {

TracePC TPC;

size_t TracePC::GetTotalPCCoverage() {
  return ObservedPCs.size();
}


void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModules &&
      Modules[NumModules - 1].Start() == Start)
    return;
  assert(NumModules <
         sizeof(Modules) / sizeof(Modules[0]));
  auto &M = Modules[NumModules++];
  uint8_t *AlignedStart = RoundUpByPage(Start);
  uint8_t *AlignedStop = RoundDownByPage(Stop);
  size_t NumFullPages = AlignedStop > AlignedStart ?
                        (AlignedStop - AlignedStart) / PageSize() : 0;
  bool NeedFirst = Start < AlignedStart || !NumFullPages;
  bool NeedLast = Stop > AlignedStop && AlignedStop >= AlignedStart;
  M.NumRegions = NumFullPages + NeedFirst + NeedLast;
  assert(M.NumRegions > 0);
  M.Regions = new Module::Region[M.NumRegions];
  assert(M.Regions);
  size_t R = 0;
  if (NeedFirst)
    M.Regions[R++] = {Start, std::min(Stop, AlignedStart), true, false};
  for (uint8_t *P = AlignedStart; P < AlignedStop; P += PageSize())
    M.Regions[R++] = {P, P + PageSize(), true, true};
  if (NeedLast)
    M.Regions[R++] = {AlignedStop, Stop, true, false};
  assert(R == M.NumRegions);
  assert(M.Size() == (size_t)(Stop - Start));
  assert(M.Stop() == Stop);
  assert(M.Start() == Start);
  NumInline8bitCounters += M.Size();
}

// Mark all full page counter regions as PROT_NONE and set Enabled=false.
// The first time the instrumented code hits such a protected/disabled
// counter region we should catch a SEGV and call UnprotectLazyCounters,
// which will mark the page as PROT_READ|PROT_WRITE and set Enabled=true.
//
// Whenever other functions iterate over the counters they should ignore
// regions with Enabled=false.
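//
// Illustrative sequence, assuming lazy counters are enabled:
// ProtectLazyCounters() runs once and protects every one-full-page region;
// the first write to such a page raises SEGV, and the handler passes the
// faulting address to UnprotectLazyCounters(), which re-enables exactly
// that region.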
void TracePC::ProtectLazyCounters() {
  size_t NumPagesProtected = 0;
  IterateCounterRegions([&](Module::Region &R) {
    if (!R.OneFullPage) return;
    if (Mprotect(R.Start, R.Stop - R.Start, false)) {
      R.Enabled = false;
      NumPagesProtected++;
    }
  });
  if (NumPagesProtected)
    Printf("INFO: %zd pages of counters were protected;"
           " libFuzzer's SEGV handler must be installed\n",
           NumPagesProtected);
}

bool TracePC::UnprotectLazyCounters(void *CounterPtr) {
  // Printf("UnprotectLazyCounters: %p\n", CounterPtr);
  if (!CounterPtr)
    return false;
  bool Done = false;
  uint8_t *Addr = reinterpret_cast<uint8_t *>(CounterPtr);
  IterateCounterRegions([&](Module::Region &R) {
    if (!R.OneFullPage || R.Enabled || Done) return;
    if (Addr >= R.Start && Addr < R.Stop)
      if (Mprotect(R.Start, R.Stop - R.Start, true)) {
        R.Enabled = true;
        Done = true;
      }
  });
  return Done;
}

void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

void TracePC::PrintModuleInfo() {
  if (NumModules) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModules, NumInline8bitCounters);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Size(), Modules[i].Start(),
             Modules[i].Stop());
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}

/// \return the address of the previous instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.h`
inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
#if defined(__arm__)
  // T32 (Thumb) branch instructions might be 16 or 32 bit long,
  // so we return (pc-2) in that case in order to be safe.
  // For A32 mode we return (pc-4) because all instructions are 32 bit long.
  return (PC - 3) & (~1);
#elif defined(__powerpc__) || defined(__powerpc64__) || defined(__aarch64__)
  // PCs are always 4 byte aligned.
  return PC - 4;
#elif defined(__sparc__) || defined(__mips__)
  return PC - 8;
#else
  return PC - 1;
#endif
}

/// \return the address of the next instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.cc`
ALWAYS_INLINE uintptr_t TracePC::GetNextInstructionPc(uintptr_t PC) {
#if defined(__mips__)
  return PC + 8;
#elif defined(__powerpc__) || defined(__sparc__) || defined(__arm__) || \
    defined(__aarch64__)
  return PC + 4;
#else
  return PC + 1;
#endif
}

void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  auto ObservePC = [&](const PCTableEntry *TE) {
    if (ObservedPCs.insert(TE).second && DoPrintNewPCs) {
      PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p",
              GetNextInstructionPc(TE->PC));
      Printf("\n");
    }
  };

  auto Observe = [&](const PCTableEntry *TE) {
    if (PcIsFuncEntry(TE))
      if (++ObservedFuncs[TE->PC] == 1 && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE->PC);
    ObservePC(TE);
  };

  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      for (size_t i = 0; i < NumModules; i++) {
        auto &M = Modules[i];
        assert(M.Size() ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t r = 0; r < M.NumRegions; r++) {
          auto &R = M.Regions[r];
          if (!R.Enabled) continue;
          for (uint8_t *P = R.Start; P < R.Stop; P++)
            if (*P)
              Observe(&ModulePCTable[i].Start[M.Idx(P)]);
        }
      }
    }
  }

  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N;
       i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
    PrintPC("%p %F %L", "%p", GetNextInstructionPc(CoveredFuncs[i]));
    Printf("\n");
  }
}

uintptr_t TracePC::PCTableEntryIdx(const PCTableEntry *TE) {
  size_t TotalTEs = 0;
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    if (TE >= M.Start && TE < M.Stop)
      return TotalTEs + TE - M.Start;
    TotalTEs += M.Stop - M.Start;
  }
  assert(0);
  return 0;
}

const TracePC::PCTableEntry *TracePC::PCTableEntryByIdx(uintptr_t Idx) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    size_t Size = M.Stop - M.Start;
    if (Idx < Size) return &M.Start[Idx];
    Idx -= Size;
  }
  return nullptr;
}

static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = "";  // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

template<class CallBack>
void TracePC::IterateCoveredFunctions(CallBack CB) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto NextFE = M.Start; NextFE < M.Stop; ) {
      auto FE = NextFE;
      assert(PcIsFuncEntry(FE) && "Not a function entry point");
      do {
        NextFE++;
      } while (NextFE < M.Stop && !(PcIsFuncEntry(NextFE)));
      CB(FE, NextFE, ObservedFuncs[FE->PC]);
    }
  }
}

void TracePC::SetFocusFunction(const std::string &FuncName) {
  // This function should be called once.
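  // Note: FuncName is matched against DescribePC("%F", ...) output (with a
  // leading "in " stripped); on a match we remember a pointer into this
  // module's inline 8-bit counters so ObservedFocusFunction() can later tell
  // whether the focus function was executed.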
  assert(!FocusFunctionCounterPtr);
  if (FuncName.empty())
    return;
  for (size_t M = 0; M < NumModules; M++) {
    auto &PCTE = ModulePCTable[M];
    size_t N = PCTE.Stop - PCTE.Start;
    for (size_t I = 0; I < N; I++) {
      if (!(PcIsFuncEntry(&PCTE.Start[I]))) continue;  // not a function entry.
      auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
      if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
        Name = Name.substr(3, std::string::npos);
      if (FuncName != Name) continue;
      Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
      FocusFunctionCounterPtr = Modules[M].Start() + I;
      return;
    }
  }
}

bool TracePC::ObservedFocusFunction() {
  return FocusFunctionCounterPtr && *FocusFunctionCounterPtr;
}

void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  auto CoveredFunctionCallback = [&](const PCTableEntry *First,
                                     const PCTableEntry *Last,
                                     uintptr_t Counter) {
    assert(First < Last);
    auto VisualizePC = GetNextInstructionPc(First->PC);
    std::string FileStr = DescribePC("%s", VisualizePC);
    if (!IsInterestingCoverageFile(FileStr))
      return;
    std::string FunctionStr = DescribePC("%F", VisualizePC);
    if (FunctionStr.find("in ") == 0)
      FunctionStr = FunctionStr.substr(3);
    std::string LineStr = DescribePC("%l", VisualizePC);
    size_t NumEdges = Last - First;
    Vector<uintptr_t> UncoveredPCs;
    for (auto TE = First; TE < Last; TE++)
      if (!ObservedPCs.count(TE))
        UncoveredPCs.push_back(TE->PC);
    Printf("%sCOVERED_FUNC: hits: %zd", Counter ? "" : "UN", Counter);
    Printf(" edges: %zd/%zd", NumEdges - UncoveredPCs.size(), NumEdges);
    Printf(" %s %s:%s\n", FunctionStr.c_str(), FileStr.c_str(),
           LineStr.c_str());
    if (Counter)
      for (auto PC : UncoveredPCs)
        Printf("  UNCOVERED_PC: %s\n",
               DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
  };

  IterateCoveredFunctions(CoveredFunctionCallback);
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is a XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.
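//
// A worked illustration (informal, derived from the code below): for
// memcmp("foo1", "foo2", 4) the common prefix length is 3, so
// AddValueForMemcmp records the feature (caller_pc & 4095) | (3 << 12).
// For a 64-bit compare of 0x12 vs 0x10, HandleCmp records features based on
// popcount(0x12 ^ 0x10) == 1 (the Hamming distance) and on the number of
// leading zeros of the difference.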

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}

template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  uint64_t HammingDistance = Popcountll(ArgXor);  // [0,64]
  uint64_t AbsoluteDistance = (Arg1 == Arg2 ? 0 : Clzll(Arg1 - Arg2) + 1);
  ValueProfileMap.AddValue(PC * 128 + HammingDistance);
  ValueProfileMap.AddValue(PC * 128 + 64 + AbsoluteDistance);
}

static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually be non-zero terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  IterateCounterRegions([](const Module::Region &R){
    if (R.Enabled)
      memset(R.Start, 0, R.Stop - R.Start);
  });
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack;  // Stack grows down
}

void WarnAboutDeprecatedInstrumentation(const char *flag) {
  // Use RawPrint because Printf cannot be used on Windows before OutputFile is
  // initialized.
  RawPrint(flag);
  RawPrint(
      " is no longer supported by libFuzzer.\n"
      "Please either migrate to a compiler that supports -fsanitize=fuzzer\n"
      "or use an older version of libFuzzer\n");
  exit(1);
}

}  // namespace fuzzer

extern "C" {
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  fuzzer::WarnAboutDeprecatedInstrumentation(
      "-fsanitize-coverage=trace-pc-guard");
}

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  fuzzer::WarnAboutDeprecatedInstrumentation("-fsanitize-coverage=trace-pc");
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::WarnAboutDeprecatedInstrumentation(
      "-fsanitize-coverage=trace-pc-guard");
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
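  // Cases[] layout (per SanitizerCoverage's trace-cmp instrumentation):
  // Cases[0] is the number of case constants, Cases[1] is the bit width of
  // Val, and the case constants themselves start at Cases[2]; the code below
  // relies on them being sorted in ascending order.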
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case: all switch values are small.
  // We may want to skip this at compile-time, but it will make the
  // instrumentation less general.
  if (Vals[N - 1] < 256)
    return;
  // Also skip small input values, they won't give good signal.
  if (Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  size_t i;
  uint64_t Smaller = 0;
  uint64_t Larger = ~(uint64_t)0;
  // Find two switch values such that Smaller < Val < Larger.
  // Use 0 and 0xfff..f as the defaults.
  for (i = 0; i < N; i++) {
    if (Val < Vals[i]) {
      Larger = Vals[i];
      break;
    }
    if (Val > Vals[i]) Smaller = Vals[i];
  }

  // Apply HandleCmp to {Val,Smaller} and {Val, Larger},
  // use i as the PC modifier for HandleCmp.
  if (ValSizeInBits == 16) {
    fuzzer::TPC.HandleCmp(PC + 2 * i, static_cast<uint16_t>(Val),
                          (uint16_t)(Smaller));
    fuzzer::TPC.HandleCmp(PC + 2 * i + 1, static_cast<uint16_t>(Val),
                          (uint16_t)(Larger));
  } else if (ValSizeInBits == 32) {
    fuzzer::TPC.HandleCmp(PC + 2 * i, static_cast<uint32_t>(Val),
                          (uint32_t)(Smaller));
    fuzzer::TPC.HandleCmp(PC + 2 * i + 1, static_cast<uint32_t>(Val),
                          (uint32_t)(Larger));
  } else {
    fuzzer::TPC.HandleCmp(PC + 2*i, Val, Smaller);
    fuzzer::TPC.HandleCmp(PC + 2*i + 1, Val, Larger);
  }
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}
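
// The __sanitizer_weak_hook_* functions below are weak interface functions
// called from the sanitizers' interceptors for memcmp/strcmp/strstr and
// friends. While the user callback is running they feed the observed
// arguments into AddValueForMemcmp or into TPC.MMT.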
ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
}  // extern "C"