1 //===-- tsan_platform.h -----------------------------------------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file is a part of ThreadSanitizer (TSan), a race detector.
10 //
11 // Platform-specific code.
12 //===----------------------------------------------------------------------===//
13
14 #ifndef TSAN_PLATFORM_H
15 #define TSAN_PLATFORM_H
16
17 #if !defined(__LP64__) && !defined(_WIN64)
18 # error "Only 64-bit is supported"
19 #endif
20
21 #include "tsan_defs.h"
22 #include "tsan_trace.h"
23
24 namespace __tsan {
25
26 #if defined(__x86_64__)
27 #define HAS_48_BIT_ADDRESS_SPACE 1
28 #elif SANITIZER_IOSSIM // arm64 iOS simulators (order of #if matters)
29 #define HAS_48_BIT_ADDRESS_SPACE 1
30 #elif SANITIZER_IOS // arm64 iOS devices (order of #if matters)
31 #define HAS_48_BIT_ADDRESS_SPACE 0
32 #elif SANITIZER_MAC // arm64 macOS (order of #if matters)
33 #define HAS_48_BIT_ADDRESS_SPACE 1
34 #else
35 #define HAS_48_BIT_ADDRESS_SPACE 0
36 #endif
37
38 #if !SANITIZER_GO
39
40 #if HAS_48_BIT_ADDRESS_SPACE
41 /*
42 C/C++ on linux/x86_64 and freebsd/x86_64
43 0000 0000 1000 - 0080 0000 0000: main binary and/or MAP_32BIT mappings (512GB)
44 0040 0000 0000 - 0100 0000 0000: -
45 0100 0000 0000 - 2000 0000 0000: shadow
46 2000 0000 0000 - 3000 0000 0000: -
47 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
48 4000 0000 0000 - 5500 0000 0000: -
49 5500 0000 0000 - 5680 0000 0000: pie binaries without ASLR or on 4.1+ kernels
50 5680 0000 0000 - 6000 0000 0000: -
51 6000 0000 0000 - 6200 0000 0000: traces
52 6200 0000 0000 - 7d00 0000 0000: -
53 7b00 0000 0000 - 7c00 0000 0000: heap
54 7c00 0000 0000 - 7e80 0000 0000: -
55 7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
56
57 C/C++ on netbsd/amd64 can reuse the same mapping:
58 * The address space starts from 0x1000 (option with 0x0) and ends with
59 0x7f7ffffff000.
60 * LoAppMem-kHeapMemEnd can be reused as it is.
61 * No VDSO support.
62 * No MidAppMem region.
63 * No additional HeapMem region.
64 * HiAppMem contains the stack, loader, shared libraries and heap.
65 * Stack on NetBSD/amd64 has prereserved 128MB.
66 * Heap grows downwards (top-down).
67 * ASLR must be disabled per-process or globally.
68
69 */
// Address-space layout constants for C/C++ on 48-bit-VMA targets
// (linux/x86_64, freebsd/x86_64, netbsd/amd64, arm64 macOS/iOS-sim); see the
// layout comment above. kAppMemMsk/kAppMemXor are the mask/xor constants used
// by the app<->shadow address transformation implemented elsewhere in the
// runtime.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x340000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x200000000000ull;
  static const uptr kHeapMemBeg    = 0x7b0000000000ull;
  static const uptr kHeapMemEnd    = 0x7c0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x008000000000ull;
  // Mid range covers PIE binaries loaded around 0x5500'00000000.
  static const uptr kMidAppMemBeg  = 0x550000000000ull;
  static const uptr kMidAppMemEnd  = 0x568000000000ull;
  static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x800000000000ull;
  static const uptr kAppMemMsk     = 0x780000000000ull;
  static const uptr kAppMemXor     = 0x040000000000ull;
  static const uptr kVdsoBeg       = 0xf000000000000000ull;
};
89
90 #define TSAN_MID_APP_RANGE 1
91 #elif defined(__mips64)
92 /*
93 C/C++ on linux/mips64 (40-bit VMA)
94 0000 0000 00 - 0100 0000 00: - (4 GB)
95 0100 0000 00 - 0200 0000 00: main binary (4 GB)
96 0200 0000 00 - 2000 0000 00: - (120 GB)
97 2000 0000 00 - 4000 0000 00: shadow (128 GB)
98 4000 0000 00 - 5000 0000 00: metainfo (memory blocks and sync objects) (64 GB)
99 5000 0000 00 - aa00 0000 00: - (360 GB)
100 aa00 0000 00 - ab00 0000 00: main binary (PIE) (4 GB)
101 ab00 0000 00 - b000 0000 00: - (20 GB)
102 b000 0000 00 - b200 0000 00: traces (8 GB)
103 b200 0000 00 - fe00 0000 00: - (304 GB)
104 fe00 0000 00 - ff00 0000 00: heap (4 GB)
105 ff00 0000 00 - ff80 0000 00: - (2 GB)
106 ff80 0000 00 - ffff ffff ff: modules and main thread stack (<2 GB)
107 */
// Address-space layout constants for C/C++ on linux/mips64 with a 40-bit VMA;
// see the layout comment above. Selected at runtime via vmaSize
// (TSAN_RUNTIME_VMA).
struct Mapping40 {
  static const uptr kMetaShadowBeg = 0x4000000000ull;
  static const uptr kMetaShadowEnd = 0x5000000000ull;
  static const uptr kTraceMemBeg   = 0xb000000000ull;
  static const uptr kTraceMemEnd   = 0xb200000000ull;
  static const uptr kShadowBeg     = 0x2000000000ull;
  static const uptr kShadowEnd     = 0x4000000000ull;
  static const uptr kHeapMemBeg    = 0xfe00000000ull;
  static const uptr kHeapMemEnd    = 0xff00000000ull;
  static const uptr kLoAppMemBeg   = 0x0100000000ull;
  static const uptr kLoAppMemEnd   = 0x0200000000ull;
  // Mid range covers PIE binaries loaded around 0xaa'00000000.
  static const uptr kMidAppMemBeg  = 0xaa00000000ull;
  static const uptr kMidAppMemEnd  = 0xab00000000ull;
  static const uptr kHiAppMemBeg   = 0xff80000000ull;
  static const uptr kHiAppMemEnd   = 0xffffffffffull;
  static const uptr kAppMemMsk     = 0xf800000000ull;
  static const uptr kAppMemXor     = 0x0800000000ull;
  static const uptr kVdsoBeg       = 0xfffff00000ull;
};
127
128 #define TSAN_MID_APP_RANGE 1
129 #define TSAN_RUNTIME_VMA 1
130 #elif defined(__aarch64__) && defined(__APPLE__)
131 /*
132 C/C++ on Darwin/iOS/ARM64 (36-bit VMA, 64 GB VM)
133 0000 0000 00 - 0100 0000 00: - (4 GB)
134 0100 0000 00 - 0200 0000 00: main binary, modules, thread stacks (4 GB)
135 0200 0000 00 - 0300 0000 00: heap (4 GB)
136 0300 0000 00 - 0400 0000 00: - (4 GB)
137 0400 0000 00 - 0c00 0000 00: shadow memory (32 GB)
138 0c00 0000 00 - 0d00 0000 00: - (4 GB)
139 0d00 0000 00 - 0e00 0000 00: metainfo (4 GB)
140 0e00 0000 00 - 0f00 0000 00: - (4 GB)
141 0f00 0000 00 - 0fc0 0000 00: traces (3 GB)
142 0fc0 0000 00 - 1000 0000 00: -
143 */
// Address-space layout constants for C/C++ on Darwin/iOS arm64 devices with a
// 36-bit VMA; see the layout comment above. Note kHiAppMemBeg == kHiAppMemEnd:
// the high app range is deliberately empty on this target, so all app memory
// lives in the Lo/Heap ranges. kAppMemMsk/kAppMemXor are zero — the shadow
// transformation needs no mask/xor here.
struct Mapping {
  static const uptr kLoAppMemBeg   = 0x0100000000ull;
  static const uptr kLoAppMemEnd   = 0x0200000000ull;
  static const uptr kHeapMemBeg    = 0x0200000000ull;
  static const uptr kHeapMemEnd    = 0x0300000000ull;
  static const uptr kShadowBeg     = 0x0400000000ull;
  static const uptr kShadowEnd     = 0x0c00000000ull;
  static const uptr kMetaShadowBeg = 0x0d00000000ull;
  static const uptr kMetaShadowEnd = 0x0e00000000ull;
  static const uptr kTraceMemBeg   = 0x0f00000000ull;
  static const uptr kTraceMemEnd   = 0x0fc0000000ull;
  static const uptr kHiAppMemBeg   = 0x0fc0000000ull;
  static const uptr kHiAppMemEnd   = 0x0fc0000000ull;  // empty range (Beg == End)
  static const uptr kAppMemMsk     = 0x0ull;
  static const uptr kAppMemXor     = 0x0ull;
  static const uptr kVdsoBeg       = 0x7000000000000000ull;
};
161
162 #elif defined(__aarch64__) && !defined(__APPLE__)
163 // AArch64 supports multiple VMA which leads to multiple address transformation
164 // functions. To support these multiple VMAS transformations and mappings TSAN
165 // runtime for AArch64 uses an external memory read (vmaSize) to select which
166 // mapping to use. Although slower, it make a same instrumented binary run on
167 // multiple kernels.
168
169 /*
170 C/C++ on linux/aarch64 (39-bit VMA)
171 0000 0010 00 - 0100 0000 00: main binary
172 0100 0000 00 - 0800 0000 00: -
173 0800 0000 00 - 2000 0000 00: shadow memory
174 2000 0000 00 - 3100 0000 00: -
175 3100 0000 00 - 3400 0000 00: metainfo
176 3400 0000 00 - 5500 0000 00: -
177 5500 0000 00 - 5600 0000 00: main binary (PIE)
178 5600 0000 00 - 6000 0000 00: -
179 6000 0000 00 - 6200 0000 00: traces
6200 0000 00 - 7c00 0000 00: -
181 7c00 0000 00 - 7d00 0000 00: heap
182 7d00 0000 00 - 7fff ffff ff: modules and main thread stack
183 */
// Address-space layout constants for C/C++ on linux/aarch64 with a 39-bit VMA;
// see the layout comment above. Selected at runtime via vmaSize == 39.
struct Mapping39 {
  static const uptr kLoAppMemBeg   = 0x0000001000ull;
  static const uptr kLoAppMemEnd   = 0x0100000000ull;
  static const uptr kShadowBeg     = 0x0800000000ull;
  static const uptr kShadowEnd     = 0x2000000000ull;
  static const uptr kMetaShadowBeg = 0x3100000000ull;
  static const uptr kMetaShadowEnd = 0x3400000000ull;
  // Mid range covers PIE binaries loaded around 0x55'00000000.
  static const uptr kMidAppMemBeg  = 0x5500000000ull;
  static const uptr kMidAppMemEnd  = 0x5600000000ull;
  static const uptr kTraceMemBeg   = 0x6000000000ull;
  static const uptr kTraceMemEnd   = 0x6200000000ull;
  static const uptr kHeapMemBeg    = 0x7c00000000ull;
  static const uptr kHeapMemEnd    = 0x7d00000000ull;
  static const uptr kHiAppMemBeg   = 0x7e00000000ull;
  static const uptr kHiAppMemEnd   = 0x7fffffffffull;
  static const uptr kAppMemMsk     = 0x7800000000ull;
  static const uptr kAppMemXor     = 0x0200000000ull;
  static const uptr kVdsoBeg       = 0x7f00000000ull;
};
203
204 /*
205 C/C++ on linux/aarch64 (42-bit VMA)
206 00000 0010 00 - 01000 0000 00: main binary
207 01000 0000 00 - 10000 0000 00: -
208 10000 0000 00 - 20000 0000 00: shadow memory
209 20000 0000 00 - 26000 0000 00: -
210 26000 0000 00 - 28000 0000 00: metainfo
211 28000 0000 00 - 2aa00 0000 00: -
212 2aa00 0000 00 - 2ab00 0000 00: main binary (PIE)
213 2ab00 0000 00 - 36200 0000 00: -
214 36200 0000 00 - 36240 0000 00: traces
215 36240 0000 00 - 3e000 0000 00: -
216 3e000 0000 00 - 3f000 0000 00: heap
217 3f000 0000 00 - 3ffff ffff ff: modules and main thread stack
218 */
// Address-space layout constants for C/C++ on linux/aarch64 with a 42-bit VMA;
// see the layout comment above. Selected at runtime via vmaSize == 42.
struct Mapping42 {
  static const uptr kLoAppMemBeg   = 0x00000001000ull;
  static const uptr kLoAppMemEnd   = 0x01000000000ull;
  static const uptr kShadowBeg     = 0x10000000000ull;
  static const uptr kShadowEnd     = 0x20000000000ull;
  static const uptr kMetaShadowBeg = 0x26000000000ull;
  static const uptr kMetaShadowEnd = 0x28000000000ull;
  // Mid range covers PIE binaries loaded around 0x2aa'00000000.
  static const uptr kMidAppMemBeg  = 0x2aa00000000ull;
  static const uptr kMidAppMemEnd  = 0x2ab00000000ull;
  static const uptr kTraceMemBeg   = 0x36200000000ull;
  static const uptr kTraceMemEnd   = 0x36400000000ull;
  static const uptr kHeapMemBeg    = 0x3e000000000ull;
  static const uptr kHeapMemEnd    = 0x3f000000000ull;
  static const uptr kHiAppMemBeg   = 0x3f000000000ull;
  static const uptr kHiAppMemEnd   = 0x3ffffffffffull;
  static const uptr kAppMemMsk     = 0x3c000000000ull;
  static const uptr kAppMemXor     = 0x04000000000ull;
  static const uptr kVdsoBeg       = 0x37f00000000ull;
};
238
// Address-space layout constants for C/C++ on linux/aarch64 with a 48-bit VMA.
// Selected at runtime via vmaSize == 48. Note kHeapMemBeg == kHeapMemEnd: the
// dedicated heap range is empty on this layout, so heap allocations fall into
// the Hi app range instead — NOTE(review): looks intentional, confirm against
// the allocator placement code.
struct Mapping48 {
  static const uptr kLoAppMemBeg   = 0x0000000001000ull;
  static const uptr kLoAppMemEnd   = 0x0000200000000ull;
  static const uptr kShadowBeg     = 0x0002000000000ull;
  static const uptr kShadowEnd     = 0x0004000000000ull;
  static const uptr kMetaShadowBeg = 0x0005000000000ull;
  static const uptr kMetaShadowEnd = 0x0006000000000ull;
  // Mid range covers PIE binaries loaded around 0xaaaa'00000000.
  static const uptr kMidAppMemBeg  = 0x0aaaa00000000ull;
  static const uptr kMidAppMemEnd  = 0x0aaaf00000000ull;
  static const uptr kTraceMemBeg   = 0x0f06000000000ull;
  static const uptr kTraceMemEnd   = 0x0f06200000000ull;
  static const uptr kHeapMemBeg    = 0x0ffff00000000ull;
  static const uptr kHeapMemEnd    = 0x0ffff00000000ull;  // empty range (Beg == End)
  static const uptr kHiAppMemBeg   = 0x0ffff00000000ull;
  static const uptr kHiAppMemEnd   = 0x1000000000000ull;
  static const uptr kAppMemMsk     = 0x0fff800000000ull;
  static const uptr kAppMemXor     = 0x0000800000000ull;
  static const uptr kVdsoBeg       = 0xffff000000000ull;
};
258
259 // Indicates the runtime will define the memory regions at runtime.
260 #define TSAN_RUNTIME_VMA 1
261 // Indicates that mapping defines a mid range memory segment.
262 #define TSAN_MID_APP_RANGE 1
263 #elif defined(__powerpc64__)
264 // PPC64 supports multiple VMA which leads to multiple address transformation
265 // functions. To support these multiple VMAS transformations and mappings TSAN
266 // runtime for PPC64 uses an external memory read (vmaSize) to select which
267 // mapping to use. Although slower, it make a same instrumented binary run on
268 // multiple kernels.
269
270 /*
271 C/C++ on linux/powerpc64 (44-bit VMA)
272 0000 0000 0100 - 0001 0000 0000: main binary
273 0001 0000 0000 - 0001 0000 0000: -
274 0001 0000 0000 - 0b00 0000 0000: shadow
275 0b00 0000 0000 - 0b00 0000 0000: -
276 0b00 0000 0000 - 0d00 0000 0000: metainfo (memory blocks and sync objects)
277 0d00 0000 0000 - 0d00 0000 0000: -
278 0d00 0000 0000 - 0f00 0000 0000: traces
279 0f00 0000 0000 - 0f00 0000 0000: -
280 0f00 0000 0000 - 0f50 0000 0000: heap
281 0f50 0000 0000 - 0f60 0000 0000: -
282 0f60 0000 0000 - 1000 0000 0000: modules and main thread stack
283 */
// Address-space layout constants for C/C++ on linux/powerpc64 with a 44-bit
// VMA; see the layout comment above. Selected at runtime via vmaSize == 44.
struct Mapping44 {
  static const uptr kMetaShadowBeg = 0x0b0000000000ull;
  static const uptr kMetaShadowEnd = 0x0d0000000000ull;
  static const uptr kTraceMemBeg   = 0x0d0000000000ull;
  static const uptr kTraceMemEnd   = 0x0f0000000000ull;
  static const uptr kShadowBeg     = 0x000100000000ull;
  static const uptr kShadowEnd     = 0x0b0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000000100ull;
  static const uptr kLoAppMemEnd   = 0x000100000000ull;
  static const uptr kHeapMemBeg    = 0x0f0000000000ull;
  static const uptr kHeapMemEnd    = 0x0f5000000000ull;
  static const uptr kHiAppMemBeg   = 0x0f6000000000ull;
  static const uptr kHiAppMemEnd   = 0x100000000000ull; // 44 bits
  static const uptr kAppMemMsk     = 0x0f0000000000ull;
  static const uptr kAppMemXor     = 0x002100000000ull;
  static const uptr kVdsoBeg       = 0x3c0000000000000ull;
};
301
302 /*
303 C/C++ on linux/powerpc64 (46-bit VMA)
304 0000 0000 1000 - 0100 0000 0000: main binary
305 0100 0000 0000 - 0200 0000 0000: -
306 0100 0000 0000 - 1000 0000 0000: shadow
307 1000 0000 0000 - 1000 0000 0000: -
308 1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
309 2000 0000 0000 - 2000 0000 0000: -
310 2000 0000 0000 - 2200 0000 0000: traces
311 2200 0000 0000 - 3d00 0000 0000: -
312 3d00 0000 0000 - 3e00 0000 0000: heap
313 3e00 0000 0000 - 3e80 0000 0000: -
314 3e80 0000 0000 - 4000 0000 0000: modules and main thread stack
315 */
// Address-space layout constants for C/C++ on linux/powerpc64 with a 46-bit
// VMA; see the layout comment above. Selected at runtime via vmaSize == 46.
struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg   = 0x200000000000ull;
  static const uptr kTraceMemEnd   = 0x220000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x100000000000ull;
  static const uptr kHeapMemBeg    = 0x3d0000000000ull;
  static const uptr kHeapMemEnd    = 0x3e0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x010000000000ull;
  static const uptr kHiAppMemBeg   = 0x3e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x400000000000ull; // 46 bits
  static const uptr kAppMemMsk     = 0x3c0000000000ull;
  static const uptr kAppMemXor     = 0x020000000000ull;
  static const uptr kVdsoBeg       = 0x7800000000000000ull;
};
333
334 /*
335 C/C++ on linux/powerpc64 (47-bit VMA)
336 0000 0000 1000 - 0100 0000 0000: main binary
337 0100 0000 0000 - 0200 0000 0000: -
338 0100 0000 0000 - 1000 0000 0000: shadow
339 1000 0000 0000 - 1000 0000 0000: -
340 1000 0000 0000 - 2000 0000 0000: metainfo (memory blocks and sync objects)
341 2000 0000 0000 - 2000 0000 0000: -
342 2000 0000 0000 - 2200 0000 0000: traces
343 2200 0000 0000 - 7d00 0000 0000: -
344 7d00 0000 0000 - 7e00 0000 0000: heap
345 7e00 0000 0000 - 7e80 0000 0000: -
346 7e80 0000 0000 - 8000 0000 0000: modules and main thread stack
347 */
// Address-space layout constants for C/C++ on linux/powerpc64 with a 47-bit
// VMA; see the layout comment above. Selected at runtime via vmaSize == 47.
struct Mapping47 {
  static const uptr kMetaShadowBeg = 0x100000000000ull;
  static const uptr kMetaShadowEnd = 0x200000000000ull;
  static const uptr kTraceMemBeg   = 0x200000000000ull;
  static const uptr kTraceMemEnd   = 0x220000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x100000000000ull;
  static const uptr kHeapMemBeg    = 0x7d0000000000ull;
  static const uptr kHeapMemEnd    = 0x7e0000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x010000000000ull;
  static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
  static const uptr kHiAppMemEnd   = 0x800000000000ull; // 47 bits
  static const uptr kAppMemMsk     = 0x7c0000000000ull;
  static const uptr kAppMemXor     = 0x020000000000ull;
  static const uptr kVdsoBeg       = 0x7800000000000000ull;
};
365
366 // Indicates the runtime will define the memory regions at runtime.
367 #define TSAN_RUNTIME_VMA 1
368 #elif defined(__s390x__)
369 /*
370 C/C++ on linux/s390x
371 While the kernel provides a 64-bit address space, we have to restrict ourselves
372 to 48 bits due to how e.g. SyncVar::GetId() works.
373 0000 0000 1000 - 0e00 0000 0000: binary, modules, stacks - 14 TiB
374 0e00 0000 0000 - 4000 0000 0000: -
375 4000 0000 0000 - 8000 0000 0000: shadow - 64TiB (4 * app)
376 8000 0000 0000 - 9000 0000 0000: -
377 9000 0000 0000 - 9800 0000 0000: metainfo - 8TiB (0.5 * app)
378 9800 0000 0000 - a000 0000 0000: -
379 a000 0000 0000 - b000 0000 0000: traces - 16TiB (max history * 128k threads)
380 b000 0000 0000 - be00 0000 0000: -
381 be00 0000 0000 - c000 0000 0000: heap - 2TiB (max supported by the allocator)
382 */
// Address-space layout constants for C/C++ on linux/s390x; see the layout
// comment above (restricted to 48 bits of the 64-bit address space). Note
// kHiAppMemBeg == kHiAppMemEnd: the high app range is deliberately empty —
// all app memory fits in the large Lo range.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x900000000000ull;
  static const uptr kMetaShadowEnd = 0x980000000000ull;
  static const uptr kTraceMemBeg   = 0xa00000000000ull;
  static const uptr kTraceMemEnd   = 0xb00000000000ull;
  static const uptr kShadowBeg     = 0x400000000000ull;
  static const uptr kShadowEnd     = 0x800000000000ull;
  static const uptr kHeapMemBeg    = 0xbe0000000000ull;
  static const uptr kHeapMemEnd    = 0xc00000000000ull;
  static const uptr kLoAppMemBeg   = 0x000000001000ull;
  static const uptr kLoAppMemEnd   = 0x0e0000000000ull;
  static const uptr kHiAppMemBeg   = 0xc00000004000ull;
  static const uptr kHiAppMemEnd   = 0xc00000004000ull;  // empty range (Beg == End)
  static const uptr kAppMemMsk     = 0xb00000000000ull;
  static const uptr kAppMemXor     = 0x100000000000ull;
  static const uptr kVdsoBeg       = 0xfffffffff000ull;
};
400 #endif
401
402 #elif SANITIZER_GO && !SANITIZER_WINDOWS && HAS_48_BIT_ADDRESS_SPACE
403
404 /* Go on linux, darwin and freebsd on x86_64
405 0000 0000 1000 - 0000 1000 0000: executable
406 0000 1000 0000 - 00c0 0000 0000: -
407 00c0 0000 0000 - 00e0 0000 0000: heap
408 00e0 0000 0000 - 2000 0000 0000: -
409 2000 0000 0000 - 2380 0000 0000: shadow
410 2380 0000 0000 - 3000 0000 0000: -
411 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
412 4000 0000 0000 - 6000 0000 0000: -
413 6000 0000 0000 - 6200 0000 0000: traces
414 6200 0000 0000 - 8000 0000 0000: -
415 */
416
// Go-runtime layout for linux/darwin/freebsd on x86_64; see the layout comment
// above. Go mappings use a single kAppMem[Beg,End) range instead of the
// Lo/Mid/Hi/Heap split used for C/C++.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x238000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
427
428 #elif SANITIZER_GO && SANITIZER_WINDOWS
429
430 /* Go on windows
431 0000 0000 1000 - 0000 1000 0000: executable
0000 1000 0000 - 00c0 0000 0000: -
433 00c0 0000 0000 - 00e0 0000 0000: heap
434 00e0 0000 0000 - 0100 0000 0000: -
435 0100 0000 0000 - 0500 0000 0000: shadow
436 0500 0000 0000 - 0560 0000 0000: -
437 0560 0000 0000 - 0760 0000 0000: traces
438 0760 0000 0000 - 07d0 0000 0000: metainfo (memory blocks and sync objects)
439 07d0 0000 0000 - 8000 0000 0000: -
440 */
441
// Go-runtime layout for Windows; see the layout comment above. Single
// kAppMem[Beg,End) app range, as in all Go mappings.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x076000000000ull;
  static const uptr kMetaShadowEnd = 0x07d000000000ull;
  static const uptr kTraceMemBeg   = 0x056000000000ull;
  static const uptr kTraceMemEnd   = 0x076000000000ull;
  static const uptr kShadowBeg     = 0x010000000000ull;
  static const uptr kShadowEnd     = 0x050000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
452
453 #elif SANITIZER_GO && defined(__powerpc64__)
454
455 /* Only Mapping46 and Mapping47 are currently supported for powercp64 on Go. */
456
457 /* Go on linux/powerpc64 (46-bit VMA)
458 0000 0000 1000 - 0000 1000 0000: executable
459 0000 1000 0000 - 00c0 0000 0000: -
460 00c0 0000 0000 - 00e0 0000 0000: heap
461 00e0 0000 0000 - 2000 0000 0000: -
462 2000 0000 0000 - 2380 0000 0000: shadow
463 2380 0000 0000 - 2400 0000 0000: -
464 2400 0000 0000 - 3400 0000 0000: metainfo (memory blocks and sync objects)
465 3400 0000 0000 - 3600 0000 0000: -
466 3600 0000 0000 - 3800 0000 0000: traces
467 3800 0000 0000 - 4000 0000 0000: -
468 */
469
// Go-runtime layout for linux/powerpc64 with a 46-bit VMA; see the layout
// comment above. Selected at runtime via vmaSize == 46.
struct Mapping46 {
  static const uptr kMetaShadowBeg = 0x240000000000ull;
  static const uptr kMetaShadowEnd = 0x340000000000ull;
  static const uptr kTraceMemBeg   = 0x360000000000ull;
  static const uptr kTraceMemEnd   = 0x380000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x238000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
480
481 /* Go on linux/powerpc64 (47-bit VMA)
482 0000 0000 1000 - 0000 1000 0000: executable
483 0000 1000 0000 - 00c0 0000 0000: -
484 00c0 0000 0000 - 00e0 0000 0000: heap
485 00e0 0000 0000 - 2000 0000 0000: -
486 2000 0000 0000 - 3000 0000 0000: shadow
487 3000 0000 0000 - 3000 0000 0000: -
488 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
489 4000 0000 0000 - 6000 0000 0000: -
490 6000 0000 0000 - 6200 0000 0000: traces
491 6200 0000 0000 - 8000 0000 0000: -
492 */
493
// Go-runtime layout for linux/powerpc64 with a 47-bit VMA; see the layout
// comment above. Selected at runtime via vmaSize == 47.
struct Mapping47 {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x300000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
504
505 #define TSAN_RUNTIME_VMA 1
506
507 #elif SANITIZER_GO && defined(__aarch64__)
508
509 /* Go on linux/aarch64 (48-bit VMA) and darwin/aarch64 (47-bit VMA)
510 0000 0000 1000 - 0000 1000 0000: executable
511 0000 1000 0000 - 00c0 0000 0000: -
512 00c0 0000 0000 - 00e0 0000 0000: heap
513 00e0 0000 0000 - 2000 0000 0000: -
514 2000 0000 0000 - 3000 0000 0000: shadow
515 3000 0000 0000 - 3000 0000 0000: -
516 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
517 4000 0000 0000 - 6000 0000 0000: -
518 6000 0000 0000 - 6200 0000 0000: traces
519 6200 0000 0000 - 8000 0000 0000: -
520 */
521
// Go-runtime layout for linux/aarch64 (48-bit VMA) and darwin/aarch64 (47-bit
// VMA); see the layout comment above.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x300000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
532
533 // Indicates the runtime will define the memory regions at runtime.
534 #define TSAN_RUNTIME_VMA 1
535
536 #elif SANITIZER_GO && defined(__mips64)
537 /*
538 Go on linux/mips64 (47-bit VMA)
539 0000 0000 1000 - 0000 1000 0000: executable
540 0000 1000 0000 - 00c0 0000 0000: -
541 00c0 0000 0000 - 00e0 0000 0000: heap
542 00e0 0000 0000 - 2000 0000 0000: -
543 2000 0000 0000 - 3000 0000 0000: shadow
544 3000 0000 0000 - 3000 0000 0000: -
545 3000 0000 0000 - 4000 0000 0000: metainfo (memory blocks and sync objects)
546 4000 0000 0000 - 6000 0000 0000: -
547 6000 0000 0000 - 6200 0000 0000: traces
548 6200 0000 0000 - 8000 0000 0000: -
549 */
// Go-runtime layout for linux/mips64 with a 47-bit VMA; see the layout comment
// above. Selected at runtime via vmaSize == 47.
struct Mapping47 {
  static const uptr kMetaShadowBeg = 0x300000000000ull;
  static const uptr kMetaShadowEnd = 0x400000000000ull;
  static const uptr kTraceMemBeg   = 0x600000000000ull;
  static const uptr kTraceMemEnd   = 0x620000000000ull;
  static const uptr kShadowBeg     = 0x200000000000ull;
  static const uptr kShadowEnd     = 0x300000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x00e000000000ull;
};
560
561 #define TSAN_RUNTIME_VMA 1
562
563 #elif SANITIZER_GO && defined(__s390x__)
564 /*
565 Go on linux/s390x
566 0000 0000 1000 - 1000 0000 0000: executable and heap - 16 TiB
567 1000 0000 0000 - 4000 0000 0000: -
568 4000 0000 0000 - 8000 0000 0000: shadow - 64TiB (4 * app)
569 8000 0000 0000 - 9000 0000 0000: -
570 9000 0000 0000 - 9800 0000 0000: metainfo - 8TiB (0.5 * app)
571 9800 0000 0000 - a000 0000 0000: -
572 a000 0000 0000 - b000 0000 0000: traces - 16TiB (max history * 128k threads)
573 */
// Go-runtime layout for linux/s390x; see the layout comment above.
struct Mapping {
  static const uptr kMetaShadowBeg = 0x900000000000ull;
  static const uptr kMetaShadowEnd = 0x980000000000ull;
  static const uptr kTraceMemBeg   = 0xa00000000000ull;
  static const uptr kTraceMemEnd   = 0xb00000000000ull;
  static const uptr kShadowBeg     = 0x400000000000ull;
  static const uptr kShadowEnd     = 0x800000000000ull;
  static const uptr kAppMemBeg     = 0x000000001000ull;
  static const uptr kAppMemEnd     = 0x100000000000ull;
};
584
585 #else
586 # error "Unknown platform"
587 #endif
588
589
590 #ifdef TSAN_RUNTIME_VMA
591 extern uptr vmaSize;
592 #endif
593
594
// Compile-time selectors passed as the Type template argument of
// MappingImpl/MappingArchImpl to pick which Mapping constant to return.
// The MID_APP entries exist only on platforms that define a mid app range;
// the APP entries are used only by Go mappings (see MappingImpl).
enum MappingType {
  MAPPING_LO_APP_BEG,
  MAPPING_LO_APP_END,
  MAPPING_HI_APP_BEG,
  MAPPING_HI_APP_END,
#ifdef TSAN_MID_APP_RANGE
  MAPPING_MID_APP_BEG,
  MAPPING_MID_APP_END,
#endif
  MAPPING_HEAP_BEG,
  MAPPING_HEAP_END,
  MAPPING_APP_BEG,
  MAPPING_APP_END,
  MAPPING_SHADOW_BEG,
  MAPPING_SHADOW_END,
  MAPPING_META_SHADOW_BEG,
  MAPPING_META_SHADOW_END,
  MAPPING_TRACE_BEG,
  MAPPING_TRACE_END,
  MAPPING_VDSO_BEG,
};
616
617 template<typename Mapping, int Type>
MappingImpl(void)618 uptr MappingImpl(void) {
619 switch (Type) {
620 #if !SANITIZER_GO
621 case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
622 case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
623 # ifdef TSAN_MID_APP_RANGE
624 case MAPPING_MID_APP_BEG: return Mapping::kMidAppMemBeg;
625 case MAPPING_MID_APP_END: return Mapping::kMidAppMemEnd;
626 # endif
627 case MAPPING_HI_APP_BEG: return Mapping::kHiAppMemBeg;
628 case MAPPING_HI_APP_END: return Mapping::kHiAppMemEnd;
629 case MAPPING_HEAP_BEG: return Mapping::kHeapMemBeg;
630 case MAPPING_HEAP_END: return Mapping::kHeapMemEnd;
631 case MAPPING_VDSO_BEG: return Mapping::kVdsoBeg;
632 #else
633 case MAPPING_APP_BEG: return Mapping::kAppMemBeg;
634 case MAPPING_APP_END: return Mapping::kAppMemEnd;
635 #endif
636 case MAPPING_SHADOW_BEG: return Mapping::kShadowBeg;
637 case MAPPING_SHADOW_END: return Mapping::kShadowEnd;
638 case MAPPING_META_SHADOW_BEG: return Mapping::kMetaShadowBeg;
639 case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
640 case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
641 case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
642 }
643 }
644
645 template<int Type>
MappingArchImpl(void)646 uptr MappingArchImpl(void) {
647 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
648 switch (vmaSize) {
649 case 39: return MappingImpl<Mapping39, Type>();
650 case 42: return MappingImpl<Mapping42, Type>();
651 case 48: return MappingImpl<Mapping48, Type>();
652 }
653 DCHECK(0);
654 return 0;
655 #elif defined(__powerpc64__)
656 switch (vmaSize) {
657 #if !SANITIZER_GO
658 case 44: return MappingImpl<Mapping44, Type>();
659 #endif
660 case 46: return MappingImpl<Mapping46, Type>();
661 case 47: return MappingImpl<Mapping47, Type>();
662 }
663 DCHECK(0);
664 return 0;
665 #elif defined(__mips64)
666 switch (vmaSize) {
667 #if !SANITIZER_GO
668 case 40: return MappingImpl<Mapping40, Type>();
669 #else
670 case 47: return MappingImpl<Mapping47, Type>();
671 #endif
672 }
673 DCHECK(0);
674 return 0;
675 #else
676 return MappingImpl<Mapping, Type>();
677 #endif
678 }
679
#if !SANITIZER_GO
// Accessors for the C/C++ app-memory range boundaries. Each is a thin
// dispatch through MappingArchImpl, so it folds to a constant on fixed-VMA
// targets and a vmaSize switch on runtime-VMA targets.
ALWAYS_INLINE
uptr LoAppMemBeg(void) {
  return MappingArchImpl<MAPPING_LO_APP_BEG>();
}
ALWAYS_INLINE
uptr LoAppMemEnd(void) {
  return MappingArchImpl<MAPPING_LO_APP_END>();
}

#ifdef TSAN_MID_APP_RANGE
// Mid app range (PIE binaries) — only on platforms that define one.
ALWAYS_INLINE
uptr MidAppMemBeg(void) {
  return MappingArchImpl<MAPPING_MID_APP_BEG>();
}
ALWAYS_INLINE
uptr MidAppMemEnd(void) {
  return MappingArchImpl<MAPPING_MID_APP_END>();
}
#endif

ALWAYS_INLINE
uptr HeapMemBeg(void) {
  return MappingArchImpl<MAPPING_HEAP_BEG>();
}
ALWAYS_INLINE
uptr HeapMemEnd(void) {
  return MappingArchImpl<MAPPING_HEAP_END>();
}

ALWAYS_INLINE
uptr HiAppMemBeg(void) {
  return MappingArchImpl<MAPPING_HI_APP_BEG>();
}
ALWAYS_INLINE
uptr HiAppMemEnd(void) {
  return MappingArchImpl<MAPPING_HI_APP_END>();
}

ALWAYS_INLINE
uptr VdsoBeg(void) {
  return MappingArchImpl<MAPPING_VDSO_BEG>();
}

#else

// Go builds expose a single flat app range instead of Lo/Mid/Hi/Heap.
ALWAYS_INLINE
uptr AppMemBeg(void) {
  return MappingArchImpl<MAPPING_APP_BEG>();
}
ALWAYS_INLINE
uptr AppMemEnd(void) {
  return MappingArchImpl<MAPPING_APP_END>();
}

#endif
736
// Enumerates the application memory regions of the current mapping.
// For index i, writes the region's [start, end) bounds into *start/*end and
// returns true; returns false once i exceeds the number of regions, so
// callers can iterate with i = 0, 1, 2, ... until it fails. The set and order
// of regions differs between C/C++ builds (Lo, Hi, Heap, and optionally Mid)
// and Go builds (a single flat app range).
static inline
bool GetUserRegion(int i, uptr *start, uptr *end) {
  switch (i) {
  default:
    return false;
#if !SANITIZER_GO
  case 0:
    *start = LoAppMemBeg();
    *end = LoAppMemEnd();
    return true;
  case 1:
    *start = HiAppMemBeg();
    *end = HiAppMemEnd();
    return true;
  case 2:
    *start = HeapMemBeg();
    *end = HeapMemEnd();
    return true;
# ifdef TSAN_MID_APP_RANGE
  case 3:
    *start = MidAppMemBeg();
    *end = MidAppMemEnd();
    return true;
# endif
#else
  case 0:
    *start = AppMemBeg();
    *end = AppMemEnd();
    return true;
#endif
  }
}
769
// Accessors for the shadow, metainfo and trace range boundaries, valid in
// both C/C++ and Go builds. Same dispatch pattern as the app accessors above.
ALWAYS_INLINE
uptr ShadowBeg(void) {
  return MappingArchImpl<MAPPING_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr ShadowEnd(void) {
  return MappingArchImpl<MAPPING_SHADOW_END>();
}

ALWAYS_INLINE
uptr MetaShadowBeg(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
}
ALWAYS_INLINE
uptr MetaShadowEnd(void) {
  return MappingArchImpl<MAPPING_META_SHADOW_END>();
}

ALWAYS_INLINE
uptr TraceMemBeg(void) {
  return MappingArchImpl<MAPPING_TRACE_BEG>();
}
ALWAYS_INLINE
uptr TraceMemEnd(void) {
  return MappingArchImpl<MAPPING_TRACE_END>();
}
796
797
// Returns true if |mem| lies in any application range of the given Mapping.
// All bounds are half-open [Beg, End), so empty ranges (Beg == End on some
// mappings) correctly match nothing.
template<typename Mapping>
bool IsAppMemImpl(uptr mem) {
#if !SANITIZER_GO
  return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
# ifdef TSAN_MID_APP_RANGE
         (mem >= Mapping::kMidAppMemBeg && mem < Mapping::kMidAppMemEnd) ||
# endif
         (mem >= Mapping::kLoAppMemBeg && mem < Mapping::kLoAppMemEnd) ||
         (mem >= Mapping::kHiAppMemBeg && mem < Mapping::kHiAppMemEnd);
#else
  return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
#endif
}
811
812 ALWAYS_INLINE
IsAppMem(uptr mem)813 bool IsAppMem(uptr mem) {
814 #if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
815 switch (vmaSize) {
816 case 39: return IsAppMemImpl<Mapping39>(mem);
817 case 42: return IsAppMemImpl<Mapping42>(mem);
818 case 48: return IsAppMemImpl<Mapping48>(mem);
819 }
820 DCHECK(0);
821 return false;
822 #elif defined(__powerpc64__)
823 switch (vmaSize) {
824 #if !SANITIZER_GO
825 case 44: return IsAppMemImpl<Mapping44>(mem);
826 #endif
827 case 46: return IsAppMemImpl<Mapping46>(mem);
828 case 47: return IsAppMemImpl<Mapping47>(mem);
829 }
830 DCHECK(0);
831 return false;
832 #elif defined(__mips64)
833 switch (vmaSize) {
834 #if !SANITIZER_GO
835 case 40: return IsAppMemImpl<Mapping40>(mem);
836 #else
837 case 47: return IsAppMemImpl<Mapping47>(mem);
838 #endif
839 }
840 DCHECK(0);
841 return false;
842 #else
843 return IsAppMemImpl<Mapping>(mem);
844 #endif
845 }
846
847
848 template<typename Mapping>
IsShadowMemImpl(uptr mem)849 bool IsShadowMemImpl(uptr mem) {
850 return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
851 }
852
// Returns true iff |mem| is shadow memory. Dispatches to the Mapping
// matching the runtime vmaSize on multi-VMA architectures; otherwise uses
// the compile-time Mapping.
ALWAYS_INLINE
bool IsShadowMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsShadowMemImpl<Mapping39>(mem);
    case 42: return IsShadowMemImpl<Mapping42>(mem);
    case 48: return IsShadowMemImpl<Mapping48>(mem);
  }
  // Unknown VMA size: unreachable after platform initialization.
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsShadowMemImpl<Mapping44>(mem);
#endif
    case 46: return IsShadowMemImpl<Mapping46>(mem);
    case 47: return IsShadowMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__mips64)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 40: return IsShadowMemImpl<Mapping40>(mem);
#else
    case 47: return IsShadowMemImpl<Mapping47>(mem);
#endif
  }
  DCHECK(0);
  return false;
#else
  return IsShadowMemImpl<Mapping>(mem);
#endif
}
887
888
889 template<typename Mapping>
IsMetaMemImpl(uptr mem)890 bool IsMetaMemImpl(uptr mem) {
891 return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
892 }
893
// Returns true iff |mem| is metainfo shadow memory. Dispatches to the
// Mapping matching the runtime vmaSize on multi-VMA architectures;
// otherwise uses the compile-time Mapping.
ALWAYS_INLINE
bool IsMetaMem(uptr mem) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return IsMetaMemImpl<Mapping39>(mem);
    case 42: return IsMetaMemImpl<Mapping42>(mem);
    case 48: return IsMetaMemImpl<Mapping48>(mem);
  }
  // Unknown VMA size: unreachable after platform initialization.
  DCHECK(0);
  return false;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return IsMetaMemImpl<Mapping44>(mem);
#endif
    case 46: return IsMetaMemImpl<Mapping46>(mem);
    case 47: return IsMetaMemImpl<Mapping47>(mem);
  }
  DCHECK(0);
  return false;
#elif defined(__mips64)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 40: return IsMetaMemImpl<Mapping40>(mem);
#else
    case 47: return IsMetaMemImpl<Mapping47>(mem);
#endif
  }
  DCHECK(0);
  return false;
#else
  return IsMetaMemImpl<Mapping>(mem);
#endif
}
928
929
// Maps an application address |x| to the address of its shadow cells.
// C/C++: round x down to a kShadowCell boundary and clear the region bits
// (kAppMemMsk), then XOR with kAppMemXor to relocate into the shadow
// range, and scale by kShadowCnt (shadow slots per application cell).
// Go: the mapping is linear — OR (or, on Windows, add) the shadow base.
template<typename Mapping>
uptr MemToShadowImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
      ^ Mapping::kAppMemXor) * kShadowCnt;
#else
# ifndef SANITIZER_WINDOWS
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
# else
  return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
# endif
#endif
}
944
// Maps an application address to its shadow address. Dispatches to the
// Mapping matching the runtime vmaSize on multi-VMA architectures;
// otherwise uses the compile-time Mapping.
ALWAYS_INLINE
uptr MemToShadow(uptr x) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MemToShadowImpl<Mapping39>(x);
    case 42: return MemToShadowImpl<Mapping42>(x);
    case 48: return MemToShadowImpl<Mapping48>(x);
  }
  // Unknown VMA size: unreachable after platform initialization.
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MemToShadowImpl<Mapping44>(x);
#endif
    case 46: return MemToShadowImpl<Mapping46>(x);
    case 47: return MemToShadowImpl<Mapping47>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__mips64)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 40: return MemToShadowImpl<Mapping40>(x);
#else
    case 47: return MemToShadowImpl<Mapping47>(x);
#endif
  }
  DCHECK(0);
  return 0;
#else
  return MemToShadowImpl<Mapping>(x);
#endif
}
979
980
// Maps an application address |x| to its metainfo shadow cell.
// C/C++: strip the region bits (kAppMemMsk), round down to a
// kMetaShadowCell boundary, scale by kMetaShadowSize/kMetaShadowCell, and
// OR in the meta shadow base. Go: same, but without the region mask; on
// Windows the base is added instead of OR-ed.
template<typename Mapping>
u32 *MemToMetaImpl(uptr x) {
  DCHECK(IsAppMem(x));
#if !SANITIZER_GO
  return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))) /
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
#else
# ifndef SANITIZER_WINDOWS
  return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
      kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
# else
  return (u32*)(((x & ~(kMetaShadowCell - 1)) / \
      kMetaShadowCell * kMetaShadowSize) + Mapping::kMetaShadowBeg);
# endif
#endif
}
997
// Maps an application address to its metainfo shadow cell. Dispatches to
// the Mapping matching the runtime vmaSize on multi-VMA architectures;
// otherwise uses the compile-time Mapping.
ALWAYS_INLINE
u32 *MemToMeta(uptr x) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return MemToMetaImpl<Mapping39>(x);
    case 42: return MemToMetaImpl<Mapping42>(x);
    case 48: return MemToMetaImpl<Mapping48>(x);
  }
  // Unknown VMA size: unreachable after platform initialization.
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return MemToMetaImpl<Mapping44>(x);
#endif
    case 46: return MemToMetaImpl<Mapping46>(x);
    case 47: return MemToMetaImpl<Mapping47>(x);
  }
  DCHECK(0);
  return 0;
#elif defined(__mips64)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 40: return MemToMetaImpl<Mapping40>(x);
#else
    case 47: return MemToMetaImpl<Mapping47>(x);
#endif
  }
  DCHECK(0);
  return 0;
#else
  return MemToMetaImpl<Mapping>(x);
#endif
}
1032
1033
// Inverse of MemToShadowImpl: maps a shadow address |s| back to the
// application address it shadows.
template<typename Mapping>
uptr ShadowToMemImpl(uptr s) {
  DCHECK(IsShadowMem(s));
#if !SANITIZER_GO
  // The shadow mapping is non-linear and we've lost some bits, so we don't have
  // an easy way to restore the original app address. But the mapping is a
  // bijection, so we try to restore the address as belonging to low/mid/high
  // range consecutively and see if shadow->app->shadow mapping gives us the
  // same address.
  uptr p = (s / kShadowCnt) ^ Mapping::kAppMemXor;
  if (p >= Mapping::kLoAppMemBeg && p < Mapping::kLoAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# ifdef TSAN_MID_APP_RANGE
  // Mid range: re-add the region bits that kAppMemMsk stripped.
  p = ((s / kShadowCnt) ^ Mapping::kAppMemXor) +
      (Mapping::kMidAppMemBeg & Mapping::kAppMemMsk);
  if (p >= Mapping::kMidAppMemBeg && p < Mapping::kMidAppMemEnd &&
      MemToShadow(p) == s)
    return p;
# endif
  // Fall back to the high range: set the region bits unconditionally.
  return ((s / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
#else  // #if !SANITIZER_GO
  // Go mapping is linear: undo the OR (non-Windows) / add (Windows) of the
  // shadow base and the kShadowCnt scaling.
# ifndef SANITIZER_WINDOWS
  return (s & ~Mapping::kShadowBeg) / kShadowCnt;
# else
  return (s - Mapping::kShadowBeg) / kShadowCnt;
# endif // SANITIZER_WINDOWS
#endif
}
1063
// Maps a shadow address back to the application address it shadows.
// Dispatches to the Mapping matching the runtime vmaSize on multi-VMA
// architectures; otherwise uses the compile-time Mapping.
ALWAYS_INLINE
uptr ShadowToMem(uptr s) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return ShadowToMemImpl<Mapping39>(s);
    case 42: return ShadowToMemImpl<Mapping42>(s);
    case 48: return ShadowToMemImpl<Mapping48>(s);
  }
  // Unknown VMA size: unreachable after platform initialization.
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return ShadowToMemImpl<Mapping44>(s);
#endif
    case 46: return ShadowToMemImpl<Mapping46>(s);
    case 47: return ShadowToMemImpl<Mapping47>(s);
  }
  DCHECK(0);
  return 0;
#elif defined(__mips64)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 40: return ShadowToMemImpl<Mapping40>(s);
#else
    case 47: return ShadowToMemImpl<Mapping47>(s);
#endif
  }
  DCHECK(0);
  return 0;
#else
  return ShadowToMemImpl<Mapping>(s);
#endif
}
1098
1099
1100
// Size of one per-thread trace slot: the event buffer plus its Trace
// header, plus one extra 64K page, rounded up to 64K.
// The additional page is to catch shadow stack overflow as paging fault.
// Windows wants 64K alignment for mmaps.
const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
    + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);
1105
1106 template<typename Mapping>
GetThreadTraceImpl(int tid)1107 uptr GetThreadTraceImpl(int tid) {
1108 uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
1109 DCHECK_LT(p, Mapping::kTraceMemEnd);
1110 return p;
1111 }
1112
// Returns the start of the trace slot for thread |tid|. Dispatches to the
// Mapping matching the runtime vmaSize on multi-VMA architectures;
// otherwise uses the compile-time Mapping.
ALWAYS_INLINE
uptr GetThreadTrace(int tid) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return GetThreadTraceImpl<Mapping39>(tid);
    case 42: return GetThreadTraceImpl<Mapping42>(tid);
    case 48: return GetThreadTraceImpl<Mapping48>(tid);
  }
  // Unknown VMA size: unreachable after platform initialization.
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return GetThreadTraceImpl<Mapping44>(tid);
#endif
    case 46: return GetThreadTraceImpl<Mapping46>(tid);
    case 47: return GetThreadTraceImpl<Mapping47>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__mips64)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 40: return GetThreadTraceImpl<Mapping40>(tid);
#else
    case 47: return GetThreadTraceImpl<Mapping47>(tid);
#endif
  }
  DCHECK(0);
  return 0;
#else
  return GetThreadTraceImpl<Mapping>(tid);
#endif
}
1147
1148
1149 template<typename Mapping>
GetThreadTraceHeaderImpl(int tid)1150 uptr GetThreadTraceHeaderImpl(int tid) {
1151 uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
1152 + kTraceSize * sizeof(Event);
1153 DCHECK_LT(p, Mapping::kTraceMemEnd);
1154 return p;
1155 }
1156
// Returns the Trace header address for thread |tid|. Dispatches to the
// Mapping matching the runtime vmaSize on multi-VMA architectures;
// otherwise uses the compile-time Mapping.
ALWAYS_INLINE
uptr GetThreadTraceHeader(int tid) {
#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
  switch (vmaSize) {
    case 39: return GetThreadTraceHeaderImpl<Mapping39>(tid);
    case 42: return GetThreadTraceHeaderImpl<Mapping42>(tid);
    case 48: return GetThreadTraceHeaderImpl<Mapping48>(tid);
  }
  // Unknown VMA size: unreachable after platform initialization.
  DCHECK(0);
  return 0;
#elif defined(__powerpc64__)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 44: return GetThreadTraceHeaderImpl<Mapping44>(tid);
#endif
    case 46: return GetThreadTraceHeaderImpl<Mapping46>(tid);
    case 47: return GetThreadTraceHeaderImpl<Mapping47>(tid);
  }
  DCHECK(0);
  return 0;
#elif defined(__mips64)
  switch (vmaSize) {
#if !SANITIZER_GO
    case 40: return GetThreadTraceHeaderImpl<Mapping40>(tid);
#else
    case 47: return GetThreadTraceHeaderImpl<Mapping47>(tid);
#endif
  }
  DCHECK(0);
  return 0;
#else
  return GetThreadTraceHeaderImpl<Mapping>(tid);
#endif
}
1191
// Platform hooks implemented in the per-OS tsan_platform_*.cpp files.
void InitializePlatform();
void InitializePlatformEarly();
void CheckAndProtect();
void InitializeShadowMemoryPlatform();
void FlushShadowMemory();
void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
// Helpers for interceptors that need to rescue file descriptors or the
// longjmp stack pointer from opaque libc structures.
int ExtractResolvFDs(void *state, int *fds, int nfd);
int ExtractRecvmsgFDs(void *msg, int *fds, int nfd);
uptr ExtractLongJmpSp(uptr *env);
void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size);

int call_pthread_cancel_with_cleanup(int (*fn)(void *arg),
                                     void (*cleanup)(void *arg), void *arg);

// Thread-teardown hooks.
void DestroyThreadState();
void PlatformCleanUpThreadState(ThreadState *thr);
1208
1209 } // namespace __tsan
1210
1211 #endif // TSAN_PLATFORM_H
1212