#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Add an offset to a pointer but keep the pointer as-is. Use RELOC_HIDE()
 * to prevent the compiler from making incorrect assumptions about the
 * pointer value. The weird cast keeps both GCC and sparse happy.
 */
#define SHIFT_PERCPU_PTR(__p, __offset) ({				\
	__verify_pcpu_ptr((__p));					\
	RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset)); \
})

/*
 * A percpu variable may point to a discarded region. The following are
 * established ways to produce a usable pointer from the percpu variable
 * offset.
 */
#define per_cpu(var, cpu) \
	(*SHIFT_PERCPU_PTR(&(var), per_cpu_offset(cpu)))

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#define raw_cpu_ptr(ptr) arch_raw_cpu_ptr(ptr)

#ifdef CONFIG_DEBUG_PREEMPT
#define this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, my_cpu_offset)
#else
#define this_cpu_ptr(ptr) raw_cpu_ptr(ptr)
#endif

#define __get_cpu_var(var) (*this_cpu_ptr(&(var)))
#define __raw_get_cpu_var(var) (*raw_cpu_ptr(&(var)))

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#else /* ! SMP */

#define VERIFY_PERCPU_PTR(__p) ({			\
	__verify_pcpu_ptr((__p));			\
	(typeof(*(__p)) __kernel __force *)(__p);	\
})

#define per_cpu(var, cpu)	(*((void)(cpu), VERIFY_PERCPU_PTR(&(var))))
#define __get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define __raw_get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define this_cpu_ptr(ptr)	per_cpu_ptr(ptr, 0)
#define raw_cpu_ptr(ptr)	this_cpu_ptr(ptr)

#endif	/* SMP */
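
/*
 * Illustrative usage of the accessors above (a sketch only; "pcpu_example"
 * is a hypothetical per-CPU variable, not one defined by the kernel):
 *
 *	DEFINE_PER_CPU(int, pcpu_example);
 *
 *	per_cpu(pcpu_example, cpu) += 1;	 instance of a given CPU
 *	v = *this_cpu_ptr(&pcpu_example);	 current CPU's instance; with
 *						 CONFIG_DEBUG_PREEMPT this
 *						 checks that preemption is off
 *	v = *raw_cpu_ptr(&pcpu_example);	 current CPU, without the check
 */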
SMP */ 80 81 #define VERIFY_PERCPU_PTR(__p) ({ \ 82 __verify_pcpu_ptr((__p)); \ 83 (typeof(*(__p)) __kernel __force *)(__p); \ 84 }) 85 86 #define per_cpu(var, cpu) (*((void)(cpu), VERIFY_PERCPU_PTR(&(var)))) 87 #define __get_cpu_var(var) (*VERIFY_PERCPU_PTR(&(var))) 88 #define __raw_get_cpu_var(var) (*VERIFY_PERCPU_PTR(&(var))) 89 #define this_cpu_ptr(ptr) per_cpu_ptr(ptr, 0) 90 #define raw_cpu_ptr(ptr) this_cpu_ptr(ptr) 91 92 #endif /* SMP */ 93 94 #ifndef PER_CPU_BASE_SECTION 95 #ifdef CONFIG_SMP 96 #define PER_CPU_BASE_SECTION ".data..percpu" 97 #else 98 #define PER_CPU_BASE_SECTION ".data" 99 #endif 100 #endif 101 102 #ifdef CONFIG_SMP 103 104 #ifdef MODULE 105 #define PER_CPU_SHARED_ALIGNED_SECTION "" 106 #define PER_CPU_ALIGNED_SECTION "" 107 #else 108 #define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned" 109 #define PER_CPU_ALIGNED_SECTION "..shared_aligned" 110 #endif 111 #define PER_CPU_FIRST_SECTION "..first" 112 113 #else 114 115 #define PER_CPU_SHARED_ALIGNED_SECTION "" 116 #define PER_CPU_ALIGNED_SECTION "..shared_aligned" 117 #define PER_CPU_FIRST_SECTION "" 118 119 #endif 120 121 #ifndef PER_CPU_ATTRIBUTES 122 #define PER_CPU_ATTRIBUTES 123 #endif 124 125 #ifndef PER_CPU_DEF_ATTRIBUTES 126 #define PER_CPU_DEF_ATTRIBUTES 127 #endif 128 129 /* Keep until we have removed all uses of __this_cpu_ptr */ 130 #define __this_cpu_ptr raw_cpu_ptr 131 132 #endif /* _ASM_GENERIC_PERCPU_H_ */ 133