Searched refs:VR0 (Results 1 – 7 of 7) sorted by relevance

/f-stack/freebsd/contrib/openzfs/module/zfs/
vdev_raidz_math_avx512bw.c
51 #define VR0(r...) VR0_(r)    (macro definition)
79 "vpxorq 0x00(%[SRC]), %%" VR0(r)", %%" VR0(r) "\n" \
87 "vpxorq 0x00(%[SRC]), %%" VR0(r)", %%" VR0(r) "\n" \
123 "vmovdqa64 %" VR0(r) ", %" VR4(r) "\n" \
130 "vmovdqa64 %" VR0(r) ", %" VR2(r) "\n" \
143 "vmovdqa64 0x00(%[SRC]), %%" VR0(r) "\n" \
196 "vpaddb %" VR0(r)", %" VR0(r)", %" VR0(r) "\n" \
251 "vpandq %%" _0f ", %%" VR0(r) ", %%" VR0(r) "\n" \
269 "vpshufb %%" VR0(r) ", %%" _ltmul ", %%" VR0(r) "\n"\
272 "vpxorq %%" _ta ", %%" VR0(r) ", %%" VR0(r) "\n" \
[all …]
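The VR0()/VR0_() pair is a token-pasting helper: the register-set argument r is a comma-separated list of register numbers, and VR0_ picks the first one and stringizes it into an ISA-specific register name that is spliced into the inline-asm templates shown in the hits. A minimal sketch of the idea for the AVX-512 case (the macro bodies below are an illustrative reconstruction, not copied from the file):

/* Hypothetical reconstruction of the VR0 helper for the zmm register file. */
#define VR0_(REG, ...) "zmm" #REG        /* stringize the first register number    */
#define VR0(r...)      VR0_(r)           /* r = "0, 1, 2, ..." -> picks element 0  */

/*
 * Example: with r expanding to 0, 1, 2, 3 the asm template
 *   "vpxorq 0x00(%[SRC]), %%" VR0(r) ", %%" VR0(r) "\n"
 * becomes
 *   "vpxorq 0x00(%[SRC]), %%zmm0, %%zmm0\n"
 */

The SSE2 hit further down defines VR0(r...) as VR0_(r, 1, 2, 3, 4, 5, 6), presumably padding the argument list with filler register numbers so the companion VRn_ helpers always receive enough parameters even when r names fewer registers.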
vdev_raidz_math_avx2.c
47 #define VR0(r...) VR0_(r)    (macro definition)
76 "vpxor 0x00(%[SRC]), %%" VR0(r)", %%" VR0(r) "\n" \
84 "vpxor 0x00(%[SRC]), %%" VR0(r)", %%" VR0(r) "\n" \
120 "vmovdqa %" VR0(r) ", %" VR4(r) "\n" \
127 "vmovdqa %" VR0(r) ", %" VR2(r) "\n" \
198 "vpaddb %" VR0(r)", %" VR0(r)", %" VR0(r) "\n" \
202 "vpxor %ymm12, %" VR0(r)", %" VR0(r) "\n" \
253 "vpand %%" _0f ", %%" VR0(r) ", %%" VR0(r) "\n" \
271 "vpshufb %%" VR0(r) ", %%" _ltmul ", %%" VR0(r) "\n"\
274 "vpxor %%" _ta ", %%" VR0(r) ", %%" VR0(r) "\n" \
[all …]
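The AVX2 hits at lines 198 and 202 (vpaddb to double every byte, then vpxor with a mask register) are the multiply-by-2 step in GF(2^8) that RAID-Z parity math is built on. A scalar sketch of the same operation, assuming the usual RAID-6/RAID-Z reduction polynomial 0x11d (the contents of the masked register are not visible in the hits, so the 0x1d constant here is an assumption):

#include <stdint.h>

/* Multiply one GF(2^8) element by 2 (reduction polynomial x^8+x^4+x^3+x^2+1). */
static inline uint8_t gf_mul2(uint8_t a)
{
	uint8_t carry = (a & 0x80) ? 0x1d : 0x00;  /* reduce if the top bit shifts out */
	return (uint8_t)((a << 1) ^ carry);
}

The vector code does the same for 32 bytes at once: a compare or shift yields 0xff in every lane whose top bit is set, an AND selects 0x1d there, and a single vpxor folds the reduction back in.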
vdev_raidz_math_ssse3.c
48 #define VR0(r...) VR0_(r)    (macro definition)
77 "pxor 0x00(%[SRC]), %%" VR0(r) "\n" \
85 "pxor 0x00(%[SRC]), %%" VR0(r) "\n" \
99 "pxor %" VR0(r) ", %" VR4(r) "\n" \
106 "pxor %" VR0(r) ", %" VR2(r) "\n" \
121 "movdqa %" VR0(r) ", %" VR4(r) "\n" \
128 "movdqa %" VR0(r) ", %" VR2(r) "\n" \
195 "pcmpgtb %" VR0(r)", %xmm14\n" \
199 "paddb %" VR0(r)", %" VR0(r) "\n" \
251 "psraw $0x4, %%" VR0(r) "\n" \
[all …]
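The vpshufb/_ltmul hits in the AVX-512BW and AVX2 files, together with the "psraw $0x4" hit at line 251 of the SSSE3 file, are the table-lookup multiply by an arbitrary constant: each byte is split into its low and high nibble, each nibble indexes a 16-entry table of precomputed products, and the two lookups are XORed. A scalar sketch with hypothetical table names:

#include <stdint.h>

/*
 * Multiply x by a fixed GF(2^8) constant c using two 16-entry tables:
 * mul_lo[n] = c * n and mul_hi[n] = c * (n << 4). Names are illustrative.
 */
static inline uint8_t gf_mul_const(uint8_t x,
    const uint8_t mul_lo[16], const uint8_t mul_hi[16])
{
	return mul_lo[x & 0x0f] ^ mul_hi[x >> 4];
}

In the vector code, pshufb/vpshufb performs the 16-entry lookup for every lane at once; the 4-bit right shift (the psraw $0x4 hit) combined with the _0f mask operand extracts the high nibbles.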
vdev_raidz_math_powerpc_altivec_common.h
150 "vxor " VR0(r) "," VR0(r) ",21\n" \
180 "vxor " VR0(r) "," VR0(r) ",21\n" \
195 "vxor " VR0(r) "," VR0(r) ",21\n" \
236 "vxor " VR0(r) "," VR0(r) "," VR0(r) "\n" \
249 "vxor " VR0(r) "," VR0(r) "," VR0(r) "\n" \
257 "vxor " VR0(r) "," VR0(r) "," VR0(r) "\n" \
424 "vaddubm " VR0(r) "," VR0(r) "," VR0(r) "\n" \
428 "vxor " VR0(r) ",19," VR0(r) "\n" \
442 "vaddubm " VR0(r) "," VR0(r) "," VR0(r) "\n" \
444 "vxor " VR0(r) ",19," VR0(r) "\n" \
[all …]
vdev_raidz_math_aarch64_neon_common.h
149 "eor " VR0(r) ".16b," VR0(r) ".16b,v21.16b\n" \
179 "eor " VR0(r) ".16b," VR0(r) ".16b,v21.16b\n" \
194 "eor " VR0(r) ".16b," VR0(r) ".16b,v21.16b\n" \
235 "eor " VR0(r) ".16b," VR0(r) ".16b," VR0(r) ".16b\n" \
248 "eor " VR0(r) ".16b," VR0(r) ".16b," VR0(r) ".16b\n" \
256 "eor " VR0(r) ".16b," VR0(r) ".16b," VR0(r) ".16b\n" \
419 "shl " VR0(r) ".16b," VR0(r) ".16b,#1\n" \
423 "eor " VR0(r) ".16b,v19.16b," VR0(r) ".16b\n" \
437 "shl " VR0(r) ".16b," VR0(r) ".16b,#1\n" \
439 "eor " VR0(r) ".16b,v19.16b," VR0(r) ".16b\n" \
[all …]
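The AltiVec (vaddubm/vxor) and NEON (shl/eor) hits above are the same multiply-by-2 step on those ISAs: vaddubm adds each byte vector to itself and shl shifts every byte left by one, after which the reduction value held in register 19/v19 is XORed in. A rough NEON-intrinsics equivalent of that sequence, again assuming the 0x1d reduction constant, which the hits do not show being loaded:

#include <arm_neon.h>

/* Multiply 16 GF(2^8) elements by 2 at once (NEON sketch, not the file's asm). */
static inline uint8x16_t gf_mul2_q(uint8x16_t a)
{
	/* 0xff in every byte whose top bit is set, 0x00 elsewhere. */
	uint8x16_t msb  = vcltq_s8(vreinterpretq_s8_u8(a), vdupq_n_s8(0));
	uint8x16_t poly = vandq_u8(msb, vdupq_n_u8(0x1d));
	return veorq_u8(vshlq_n_u8(a, 1), poly);   /* shl #1 then eor, as in the hits */
}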
vdev_raidz_math_sse2.c
49 #define VR0(r...) VR0_(r, 1, 2, 3, 4, 5, 6)    (macro definition)
69 "pxor 0x00(%[SRC]), %%" VR0(r) "\n" \
77 "pxor 0x00(%[SRC]), %%" VR0(r) "\n" \
93 "pxor %" VR0(r) ", %" VR4(r) "\n" \
100 "pxor %" VR0(r) ", %" VR2(r) "\n" \
105 "pxor %" VR0(r) ", %" VR1(r)); \
117 "movdqa %" VR0(r) ", %" VR4(r) "\n" \
129 "movdqa %" VR0(r) ", %" VR1(r)); \
225 _MUL2_x2(VR0(r), VR1(r)); \
229 _MUL2_x2(VR0(r), VR1(r)); \
[all …]
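The _MUL2_x2(VR0(r), VR1(r)) hits show the doubling step being reused: multiplication by 4 is simply multiplication by 2 applied twice, so the SSE2 code runs the MUL2 sequence over the same register pair back to back instead of keeping a separate table. In scalar terms, reusing the gf_mul2() sketch above:

/* Multiply by 4 = two successive multiplications by 2. */
static inline uint8_t gf_mul4(uint8_t a)
{
	return gf_mul2(gf_mul2(a));
}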
vdev_raidz_math_avx512f.c
50 #define VR0(r...) VR0_(r)    (macro definition)
93 "vpxorq 0x00(%[SRC]), %%" VR0(r)", %%" VR0(r) "\n" \
107 "vpxorq %" VR0(r) ", %" VR4(r)", %" VR4(r) "\n" \
114 "vpxorq %" VR0(r) ", %" VR2(r)", %" VR2(r) "\n" \
129 "vmovdqa64 %" VR0(r) ", %" VR4(r) "\n" \
136 "vmovdqa64 %" VR0(r) ", %" VR2(r) "\n" \
147 "vmovdqa64 0x00(%[SRC]), %%" VR0(r) "\n" \
161 "vmovdqa64 %%" VR0(r) ", 0x00(%[DST])\n" \
185 "vpandq %" VR0(r)", %zmm30, %zmm26\n" \
193 "vpsllq $1, %" VR0(r)", %" VR0(r) "\n" \
[all …]
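Note that the AVX-512F variant doubles bytes with vpsllq plus vpandq (lines 185 and 193) rather than a byte add: plain AVX-512F has no byte-granularity arithmetic on zmm registers (that arrives with AVX-512BW), so the code shifts whole 64-bit lanes and masks away the bits that would leak across byte boundaries. A scalar SWAR sketch of that approach (the constants are assumptions, not taken from the file):

#include <stdint.h>

/* Multiply 8 packed GF(2^8) elements by 2 using only 64-bit operations. */
static inline uint64_t gf_mul2_x8(uint64_t x)
{
	uint64_t msb = x & 0x8080808080808080ULL;         /* top bit of each byte            */
	uint64_t dbl = (x << 1) & 0xfefefefefefefefeULL;  /* shift; drop cross-byte carries  */
	return dbl ^ ((msb >> 7) * 0x1d);                 /* fold 0x1d into overflowing bytes */
}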