Home
last modified time | relevance | path

Searched refs:asm_inline (Results 1 – 25 of 56) sorted by relevance

123

/linux-6.15/arch/x86/include/asm/
H A Dcmpxchg.h47 asm_inline volatile (lock #op "b %b0, %1" \
52 asm_inline volatile (lock #op "w %w0, %1" \
57 asm_inline volatile (lock #op "l %0, %1" \
62 asm_inline volatile (lock #op "q %q0, %1" \
94 asm_inline volatile(lock "cmpxchgb %2, %1" \
103 asm_inline volatile(lock "cmpxchgw %2, %1" \
112 asm_inline volatile(lock "cmpxchgl %2, %1" \
121 asm_inline volatile(lock "cmpxchgq %2, %1" \
168 asm_inline volatile(lock "cmpxchgb %[new], %[ptr]" \
180 asm_inline volatile(lock "cmpxchgw %[new], %[ptr]" \
[all …]
H A Datomic.h33 asm_inline volatile(LOCK_PREFIX "addl %1, %0" in arch_atomic_add()
40 asm_inline volatile(LOCK_PREFIX "subl %1, %0" in arch_atomic_sub()
53 asm_inline volatile(LOCK_PREFIX "incl %0" in arch_atomic_inc()
60 asm_inline volatile(LOCK_PREFIX "decl %0" in arch_atomic_dec()
119 asm_inline volatile(LOCK_PREFIX "andl %1, %0" in arch_atomic_and()
137 asm_inline volatile(LOCK_PREFIX "orl %1, %0" in arch_atomic_or()
155 asm_inline volatile(LOCK_PREFIX "xorl %1, %0" in arch_atomic_xor()
H A Datomic64_64.h25 asm_inline volatile(LOCK_PREFIX "addq %1, %0" in arch_atomic64_add()
32 asm_inline volatile(LOCK_PREFIX "subq %1, %0" in arch_atomic64_sub()
45 asm_inline volatile(LOCK_PREFIX "incq %0" in arch_atomic64_inc()
53 asm_inline volatile(LOCK_PREFIX "decq %0" in arch_atomic64_dec()
113 asm_inline volatile(LOCK_PREFIX "andq %1, %0" in arch_atomic64_and()
131 asm_inline volatile(LOCK_PREFIX "orq %1, %0" in arch_atomic64_or()
149 asm_inline volatile(LOCK_PREFIX "xorq %1, %0" in arch_atomic64_xor()
H A Dvmware.h111 asm_inline volatile (VMWARE_HYPERCALL in vmware_hypercall1()
136 asm_inline volatile (VMWARE_HYPERCALL in vmware_hypercall3()
161 asm_inline volatile (VMWARE_HYPERCALL in vmware_hypercall4()
187 asm_inline volatile (VMWARE_HYPERCALL in vmware_hypercall5()
215 asm_inline volatile (VMWARE_HYPERCALL in vmware_hypercall6()
243 asm_inline volatile (VMWARE_HYPERCALL in vmware_hypercall7()
277 asm_inline volatile ( in vmware_hypercall_hb_out()
305 asm_inline volatile ( in vmware_hypercall_hb_in()
H A Dbitops.h55 asm_inline volatile(LOCK_PREFIX "orb %b1,%0" in arch_set_bit()
60 asm_inline volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0" in arch_set_bit()
75 asm_inline volatile(LOCK_PREFIX "andb %b1,%0" in arch_clear_bit()
79 asm_inline volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0" in arch_clear_bit()
101 asm_inline volatile(LOCK_PREFIX "xorb %2,%1" in arch_xor_unlock_is_negative_byte()
125 asm_inline volatile(LOCK_PREFIX "xorb %b1,%0" in arch_change_bit()
129 asm_inline volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0" in arch_change_bit()
H A Dalternative.h239 asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) : : : "memory")
242asm_inline volatile(ALTERNATIVE_2(oldinstr, newinstr1, ft_flags1, newinstr2, ft_flags2) ::: "memory")
253 asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) \
258 asm_inline volatile(ALTERNATIVE(oldinstr, newinstr, ft_flags) \
272 asm_inline volatile(ALTERNATIVE("call %c[old]", "call %c[new]", ft_flags) \
286 asm_inline volatile(ALTERNATIVE_2("call %c[old]", "call %c[new1]", ft_flags1, \
H A Dcmpxchg_32.h22 asm_inline volatile(_lock "cmpxchg8b %[ptr]" \
48 asm_inline volatile(_lock "cmpxchg8b %[ptr]" \
94 asm_inline volatile( \
124 asm_inline volatile( \
H A Darch_hweight.h19 asm_inline (ALTERNATIVE(ANNOTATE_IGNORE_ALTERNATIVE in __arch_hweight32()
49 asm_inline (ALTERNATIVE(ANNOTATE_IGNORE_ALTERNATIVE in __arch_hweight64()
H A Dbug.h44 asm_inline volatile("1:\t" ins "\n" \
62 asm_inline volatile("1:\t" ins "\n" \
H A Druntime-const.h19 asm_inline("mov %1,%0\n1:\n" \
33 asm_inline("shrl $12,%k0\n1:\n" \
H A Dcmpxchg_64.h41 asm_inline volatile(_lock "cmpxchg16b %[ptr]" \
68 asm_inline volatile(_lock "cmpxchg16b %[ptr]" \
/linux-6.15/samples/bpf/
H A Dasm_goto_workaround.h22 #ifdef asm_inline
23 #undef asm_inline
24 #define asm_inline asm macro
/linux-6.15/arch/riscv/include/asm/
H A Druntime-const.h18 asm_inline(".option push\n\t" \
87 asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE \
103 asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE \
117 asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE \
131 asm_inline(RISCV_RUNTIME_CONST_64_PREAMBLE \
143 asm_inline(".option push\n\t" \
/linux-6.15/arch/s390/include/asm/
H A Duaccess.h43 asm_inline volatile( in raw_copy_from_user()
71 asm_inline volatile( in raw_copy_to_user()
150 asm_inline volatile( \
266 asm_inline volatile( \
374 asm_inline volatile( in __clear_user()
448 asm_inline volatile( \
493 asm_inline volatile( in __cmpxchg_user_key()
541 asm_inline volatile( in __cmpxchg_user_key()
583 asm_inline volatile( in __cmpxchg_user_key()
604 asm_inline volatile( in __cmpxchg_user_key()
[all …]
H A Dalternative.h164 asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, feature) : : : "memory")
167 asm_inline volatile(ALTERNATIVE_2(oldinstr, altinstr1, feature1, \
172 asm_inline volatile (ALTERNATIVE(oldinstr, newinstr, feature) \
177 asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, feature) \
H A Dbug.h12 asm_inline volatile( \
31 asm_inline volatile( \
H A Druntime-const.h11 asm_inline( \
27 asm_inline( \
H A Dcpu_mf.h174 asm_inline volatile ( in qctri()
188 asm_inline volatile ( in lcctl()
203 asm_inline volatile ( in __ecctr()
H A Dfutex.h19 asm_inline volatile( \
81 asm_inline volatile( in futex_atomic_cmpxchg_inatomic()
H A Dspinlock.h26 asm_inline( in spinlock_lockval()
99 asm_inline volatile( in arch_spin_unlock()
/linux-6.15/drivers/gpu/drm/vmwgfx/
H A Dvmwgfx_msg_arm64.h62 asm_inline volatile ( in vmware_hypercall1()
87 asm_inline volatile ( in vmware_hypercall5()
113 asm_inline volatile ( in vmware_hypercall6()
143 asm_inline volatile ( in vmware_hypercall7()
173 asm_inline volatile ( in vmware_hypercall_hb()
/linux-6.15/drivers/s390/cio/
H A Dioasm.c25 asm_inline volatile( in __stsch()
55 asm_inline volatile( in __msch()
109 asm_inline volatile( in __ssch()
181 asm_inline volatile( in chsc()
/linux-6.15/arch/s390/pci/
H A Dpci_insn.c163 asm_inline volatile ( in ____pcilg()
232 asm_inline volatile ( in __pcilg_mio()
270 asm_inline volatile ( in __pcistg()
324 asm_inline volatile ( in __pcistg_mio()
359 asm_inline volatile ( in __pcistb()
413 asm_inline volatile ( in __pcistb_mio()
/linux-6.15/arch/arm64/include/asm/
H A Druntime-const.h12 asm_inline("1:\t" \
25 asm_inline("1:\t" \
/linux-6.15/arch/s390/lib/
H A Duaccess.c70 asm_inline volatile( in raw_copy_from_user_key()
119 asm_inline volatile( in raw_copy_to_user_key()

123