diff --git a/xml/System.Runtime.Intrinsics.X86/Aes.xml b/xml/System.Runtime.Intrinsics.X86/Aes.xml
index d988cc866be..31cf5b61078 100644
--- a/xml/System.Runtime.Intrinsics.X86/Aes.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Aes.xml
@@ -19,9 +19,7 @@
-
- This class provides access to Intel AES hardware instructions via intrinsics
-
+ This class provides access to Intel AES hardware instructions via intrinsics.
To be added.
@@ -47,10 +45,7 @@
To be added.
To be added.
-
- __m128i _mm_aesdec_si128 (__m128i a, __m128i RoundKey)
- AESDEC xmm, xmm/m128
-
+ __m128i _mm_aesdec_si128 (__m128i a, __m128i RoundKey)AESDEC xmm, xmm/m128
To be added.
To be added.
@@ -77,10 +72,7 @@
To be added.
To be added.
-
- __m128i _mm_aesdeclast_si128 (__m128i a, __m128i RoundKey)
- AESDECLAST xmm, xmm/m128
-
+ __m128i _mm_aesdeclast_si128 (__m128i a, __m128i RoundKey)AESDECLAST xmm, xmm/m128
To be added.
To be added.
@@ -107,10 +99,7 @@
To be added.
To be added.
-
- __m128i _mm_aesenc_si128 (__m128i a, __m128i RoundKey)
- AESENC xmm, xmm/m128
-
+ __m128i _mm_aesenc_si128 (__m128i a, __m128i RoundKey)AESENC xmm, xmm/m128
To be added.
To be added.
@@ -137,10 +126,7 @@
To be added.
To be added.
-
- __m128i _mm_aesenclast_si128 (__m128i a, __m128i RoundKey)
- AESENCLAST xmm, xmm/m128
-
+ __m128i _mm_aesenclast_si128 (__m128i a, __m128i RoundKey)AESENCLAST xmm, xmm/m128
To be added.
To be added.
@@ -165,10 +151,7 @@
To be added.
-
- __m128i _mm_aesimc_si128 (__m128i a)
- AESIMC xmm, xmm/m128
-
+ __m128i _mm_aesimc_si128 (__m128i a)AESIMC xmm, xmm/m128
To be added.
To be added.
@@ -216,10 +199,7 @@
To be added.
To be added.
-
- __m128i _mm_aeskeygenassist_si128 (__m128i a, const int imm8)
- AESKEYGENASSIST xmm, xmm/m128, imm8
-
+ __m128i _mm_aeskeygenassist_si128 (__m128i a, const int imm8)AESKEYGENASSIST xmm, xmm/m128, imm8
To be added.
To be added.
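Reviewer note: the summaries above pair each managed `Aes` method with the C intrinsic and AES-NI instruction it emits. As a reference point, a minimal C# sketch of guarding on `Aes.IsSupported` and calling the single-round encrypt/decrypt wrappers; the block and round-key values are arbitrary placeholders, not AES test vectors.

```csharp
using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

class AesIntrinsicsSketch
{
    static void Main()
    {
        if (!Aes.IsSupported)
        {
            Console.WriteLine("AES-NI is not available on this CPU.");
            return;
        }

        // A 128-bit block and round key; the values here are arbitrary sample data.
        Vector128<byte> block = Vector128.Create((byte)0x42);
        Vector128<byte> roundKey = Vector128.Create((byte)0x1B);

        // Aes.Encrypt corresponds to _mm_aesenc_si128 / AESENC: one round of AES encryption.
        Vector128<byte> encrypted = Aes.Encrypt(block, roundKey);

        // Aes.Decrypt corresponds to _mm_aesdec_si128 / AESDEC: one round of AES decryption.
        Vector128<byte> decrypted = Aes.Decrypt(encrypted, roundKey);

        Console.WriteLine(encrypted);
        Console.WriteLine(decrypted);
    }
}
```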
diff --git a/xml/System.Runtime.Intrinsics.X86/Avx.xml b/xml/System.Runtime.Intrinsics.X86/Avx.xml
index d58d1a4d126..fba80c9be71 100644
--- a/xml/System.Runtime.Intrinsics.X86/Avx.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Avx.xml
@@ -24,9 +24,7 @@
-
- This class provides access to Intel AVX hardware instructions via intrinsics
-
+ This class provides access to Intel AVX hardware instructions via intrinsics.
To be added.
@@ -56,10 +54,7 @@
To be added.
To be added.
-
- __m256d _mm256_add_pd (__m256d a, __m256d b)
- VADDPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_add_pd (__m256d a, __m256d b)VADDPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -90,10 +85,7 @@
To be added.
To be added.
-
- __m256 _mm256_add_ps (__m256 a, __m256 b)
- VADDPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_add_ps (__m256 a, __m256 b)VADDPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -124,10 +116,7 @@
To be added.
To be added.
-
- __m256d _mm256_addsub_pd (__m256d a, __m256d b)
- VADDSUBPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_addsub_pd (__m256d a, __m256d b)VADDSUBPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -158,10 +147,7 @@
To be added.
To be added.
-
- __m256 _mm256_addsub_ps (__m256 a, __m256 b)
- VADDSUBPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_addsub_ps (__m256 a, __m256 b)VADDSUBPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -192,10 +178,7 @@
To be added.
To be added.
-
- __m256d _mm256_and_pd (__m256d a, __m256d b)
- VANDPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_and_pd (__m256d a, __m256d b)VANDPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -226,10 +209,7 @@
To be added.
To be added.
-
- __m256 _mm256_and_ps (__m256 a, __m256 b)
- VANDPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_and_ps (__m256 a, __m256 b)VANDPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -260,10 +240,7 @@
To be added.
To be added.
-
- __m256d _mm256_andnot_pd (__m256d a, __m256d b)
- VANDNPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_andnot_pd (__m256d a, __m256d b)VANDNPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -294,10 +271,7 @@
To be added.
To be added.
-
- __m256 _mm256_andnot_ps (__m256 a, __m256 b)
- VANDNPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_andnot_ps (__m256 a, __m256 b)VANDNPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -330,10 +304,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_blend_pd (__m256d a, __m256d b, const int imm8)
- VBLENDPD ymm, ymm, ymm/m256, imm8
-
+ __m256d _mm256_blend_pd (__m256d a, __m256d b, const int imm8)VBLENDPD ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -366,10 +337,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_blend_ps (__m256 a, __m256 b, const int imm8)
- VBLENDPS ymm, ymm, ymm/m256, imm8
-
+ __m256 _mm256_blend_ps (__m256 a, __m256 b, const int imm8)VBLENDPS ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -402,10 +370,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_blendv_pd (__m256d a, __m256d b, __m256d mask)
- VBLENDVPD ymm, ymm, ymm/m256, ymm
-
+ __m256d _mm256_blendv_pd (__m256d a, __m256d b, __m256d mask)VBLENDVPD ymm, ymm, ymm/m256, ymm
To be added.
To be added.
@@ -438,10 +403,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_blendv_ps (__m256 a, __m256 b, __m256 mask)
- VBLENDVPS ymm, ymm, ymm/m256, ymm
-
+ __m256 _mm256_blendv_ps (__m256 a, __m256 b, __m256 mask)VBLENDVPS ymm, ymm, ymm/m256, ymm
To be added.
To be added.
@@ -469,10 +431,7 @@
To be added.
-
- __m128 _mm_broadcast_ss (float const * mem_addr)
- VBROADCASTSS xmm, m32
-
+ __m128 _mm_broadcast_ss (float const * mem_addr)VBROADCASTSS xmm, m32
To be added.
To be added.
@@ -500,10 +459,7 @@
To be added.
-
- __m256d _mm256_broadcast_sd (double const * mem_addr)
- VBROADCASTSD ymm, m64
-
+ __m256d _mm256_broadcast_sd (double const * mem_addr)VBROADCASTSD ymm, m64
To be added.
To be added.
@@ -531,10 +487,7 @@
To be added.
-
- __m256 _mm256_broadcast_ss (float const * mem_addr)
- VBROADCASTSS ymm, m32
-
+ __m256 _mm256_broadcast_ss (float const * mem_addr)VBROADCASTSS ymm, m32
To be added.
To be added.
@@ -562,10 +515,7 @@
To be added.
-
- __m256d _mm256_broadcast_pd (__m128d const * mem_addr)
- VBROADCASTF128, ymm, m128
-
+ __m256d _mm256_broadcast_pd (__m128d const * mem_addr)VBROADCASTF128 ymm, m128
To be added.
To be added.
@@ -593,10 +543,7 @@
To be added.
-
- __m256 _mm256_broadcast_ps (__m128 const * mem_addr)
- VBROADCASTF128, ymm, m128
-
+ __m256 _mm256_broadcast_ps (__m128 const * mem_addr)VBROADCASTF128 ymm, m128
To be added.
To be added.
@@ -625,10 +572,7 @@
To be added.
-
- __m256d _mm256_ceil_pd (__m256d a)
- VROUNDPD ymm, ymm/m256, imm8(10)
-
+ __m256d _mm256_ceil_pd (__m256d a)VROUNDPD ymm, ymm/m256, imm8(10)
To be added.
To be added.
@@ -657,10 +601,7 @@
To be added.
-
- __m256 _mm256_ceil_ps (__m256 a)
- VROUNDPS ymm, ymm/m256, imm8(10)
-
+ __m256 _mm256_ceil_ps (__m256 a)VROUNDPS ymm, ymm/m256, imm8(10)
To be added.
To be added.
@@ -693,10 +634,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_cmp_pd (__m128d a, __m128d b, const int imm8)
- VCMPPD xmm, xmm, xmm/m128, imm8
-
+ __m128d _mm_cmp_pd (__m128d a, __m128d b, const int imm8)VCMPPD xmm, xmm, xmm/m128, imm8
To be added.
To be added.
@@ -729,10 +667,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_cmp_ps (__m128 a, __m128 b, const int imm8)
- VCMPPS xmm, xmm, xmm/m128, imm8
-
+ __m128 _mm_cmp_ps (__m128 a, __m128 b, const int imm8)VCMPPS xmm, xmm, xmm/m128, imm8
To be added.
To be added.
@@ -765,10 +700,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_cmp_pd (__m256d a, __m256d b, const int imm8)
- VCMPPD ymm, ymm, ymm/m256, imm8
-
+ __m256d _mm256_cmp_pd (__m256d a, __m256d b, const int imm8)VCMPPD ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -801,10 +733,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_cmp_ps (__m256 a, __m256 b, const int imm8)
- VCMPPS ymm, ymm, ymm/m256, imm8
-
+ __m256 _mm256_cmp_ps (__m256 a, __m256 b, const int imm8)VCMPPS ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -837,10 +766,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_cmp_sd (__m128d a, __m128d b, const int imm8)
- VCMPSS xmm, xmm, xmm/m32, imm8
-
+ __m128d _mm_cmp_sd (__m128d a, __m128d b, const int imm8)VCMPSD xmm, xmm, xmm/m64, imm8
To be added.
To be added.
@@ -873,10 +799,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_cmp_ss (__m128 a, __m128 b, const int imm8)
- VCMPSD xmm, xmm, xmm/m64, imm8
-
+ __m128 _mm_cmp_ss (__m128 a, __m128 b, const int imm8)VCMPSS xmm, xmm, xmm/m32, imm8
To be added.
To be added.
@@ -905,10 +828,7 @@
To be added.
-
- __m128i _mm256_cvtpd_epi32 (__m256d a)
- VCVTPD2DQ xmm, ymm/m256
-
+ __m128i _mm256_cvtpd_epi32 (__m256d a)VCVTPD2DQ xmm, ymm/m256
To be added.
To be added.
@@ -937,10 +857,7 @@
To be added.
-
- __m128i _mm256_cvttpd_epi32 (__m256d a)
- VCVTTPD2DQ xmm, ymm/m256
-
+ __m128i _mm256_cvttpd_epi32 (__m256d a)VCVTTPD2DQ xmm, ymm/m256
To be added.
To be added.
@@ -969,10 +886,7 @@
To be added.
-
- __m128 _mm256_cvtpd_ps (__m256d a)
- VCVTPD2PS xmm, ymm/m256
-
+ __m128 _mm256_cvtpd_ps (__m256d a)VCVTPD2PS xmm, ymm/m256
To be added.
To be added.
@@ -1001,10 +915,7 @@
To be added.
-
- __m256d _mm256_cvtepi32_pd (__m128i a)
- VCVTDQ2PD ymm, xmm/m128
-
+ __m256d _mm256_cvtepi32_pd (__m128i a)VCVTDQ2PD ymm, xmm/m128
To be added.
To be added.
@@ -1033,10 +944,7 @@
To be added.
-
- __m256d _mm256_cvtps_pd (__m128 a)
- VCVTPS2PD ymm, xmm/m128
-
+ __m256d _mm256_cvtps_pd (__m128 a)VCVTPS2PD ymm, xmm/m128
To be added.
To be added.
@@ -1065,10 +973,7 @@
To be added.
-
- __m256i _mm256_cvtps_epi32 (__m256 a)
- VCVTPS2DQ ymm, ymm/m256
-
+ __m256i _mm256_cvtps_epi32 (__m256 a)VCVTPS2DQ ymm, ymm/m256
To be added.
To be added.
@@ -1097,10 +1002,7 @@
To be added.
-
- __m256i _mm256_cvttps_epi32 (__m256 a)
- VCVTTPS2DQ ymm, ymm/m256
-
+ __m256i _mm256_cvttps_epi32 (__m256 a)VCVTTPS2DQ ymm, ymm/m256
To be added.
To be added.
@@ -1129,10 +1031,7 @@
To be added.
-
- __m256 _mm256_cvtepi32_ps (__m256i a)
- VCVTDQ2PS ymm, ymm/m256
-
+ __m256 _mm256_cvtepi32_ps (__m256i a)VCVTDQ2PS ymm, ymm/m256
To be added.
To be added.
@@ -1163,10 +1062,7 @@
To be added.
To be added.
-
- __m256d _mm256_div_pd (__m256d a, __m256d b)
- VDIVPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_div_pd (__m256d a, __m256d b)VDIVPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -1197,10 +1093,7 @@
To be added.
To be added.
-
- __m256 _mm256_div_ps (__m256 a, __m256 b)
- VDIVPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_div_ps (__m256 a, __m256 b)VDIVPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -1233,10 +1126,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_dp_ps (__m256 a, __m256 b, const int imm8)
- VDPPS ymm, ymm, ymm/m256, imm8
-
+ __m256 _mm256_dp_ps (__m256 a, __m256 b, const int imm8)VDPPS ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -1265,10 +1155,7 @@
To be added.
-
- __m256d _mm256_movedup_pd (__m256d a)
- VMOVDDUP ymm, ymm/m256
-
+ __m256d _mm256_movedup_pd (__m256d a)VMOVDDUP ymm, ymm/m256
To be added.
To be added.
@@ -1297,10 +1184,7 @@
To be added.
-
- __m256 _mm256_moveldup_ps (__m256 a)
- VMOVSLDUP ymm, ymm/m256
-
+ __m256 _mm256_moveldup_ps (__m256 a)VMOVSLDUP ymm, ymm/m256
To be added.
To be added.
@@ -1329,10 +1213,7 @@
To be added.
-
- __m256 _mm256_movehdup_ps (__m256 a)
- VMOVSHDUP ymm, ymm/m256
-
+ __m256 _mm256_movehdup_ps (__m256 a)VMOVSHDUP ymm, ymm/m256
To be added.
To be added.
@@ -1359,10 +1240,7 @@
To be added.
To be added.
-
- __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1389,10 +1267,7 @@
To be added.
To be added.
-
- __m128d _mm256_extractf128_pd (__m256d a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128d _mm256_extractf128_pd (__m256d a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1419,10 +1294,7 @@
To be added.
To be added.
-
- __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1449,10 +1321,7 @@
To be added.
To be added.
-
- __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1479,10 +1348,7 @@
To be added.
To be added.
-
- __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1509,10 +1375,7 @@
To be added.
To be added.
-
- __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1539,10 +1402,7 @@
To be added.
To be added.
-
- __m128 _mm256_extractf128_ps (__m256 a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128 _mm256_extractf128_ps (__m256 a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1569,10 +1429,7 @@
To be added.
To be added.
-
- __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1599,10 +1456,7 @@
To be added.
To be added.
-
- __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1629,10 +1483,7 @@
To be added.
To be added.
-
- __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)
- VEXTRACTF128 xmm/m128, ymm, imm8
-
+ __m128i _mm256_extractf128_si256 (__m256i a, const int imm8)VEXTRACTF128 xmm/m128, ymm, imm8
To be added.
To be added.
@@ -1661,10 +1512,7 @@
To be added.
-
- __m256d _mm256_floor_pd (__m256d a)
- VROUNDPS ymm, ymm/m256, imm8(9)
-
+ __m256d _mm256_floor_pd (__m256d a)VROUNDPD ymm, ymm/m256, imm8(9)
To be added.
To be added.
@@ -1693,10 +1541,7 @@
To be added.
-
- __m256 _mm256_floor_ps (__m256 a)
- VROUNDPS ymm, ymm/m256, imm8(9)
-
+ __m256 _mm256_floor_ps (__m256 a)VROUNDPS ymm, ymm/m256, imm8(9)
To be added.
To be added.
@@ -1727,10 +1572,7 @@
To be added.
To be added.
-
- __m256d _mm256_hadd_pd (__m256d a, __m256d b)
- VHADDPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_hadd_pd (__m256d a, __m256d b)VHADDPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -1761,10 +1603,7 @@
To be added.
To be added.
-
- __m256 _mm256_hadd_ps (__m256 a, __m256 b)
- VHADDPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_hadd_ps (__m256 a, __m256 b)VHADDPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -1795,10 +1634,7 @@
To be added.
To be added.
-
- __m256d _mm256_hsub_pd (__m256d a, __m256d b)
- VHSUBPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_hsub_pd (__m256d a, __m256d b)VHSUBPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -1829,10 +1665,7 @@
To be added.
To be added.
-
- __m256 _mm256_hsub_ps (__m256 a, __m256 b)
- VHSUBPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_hsub_ps (__m256 a, __m256 b)VHSUBPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -1861,10 +1694,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -1893,10 +1723,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_insertf128_pd (__m256d a, __m128d b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256d _mm256_insertf128_pd (__m256d a, __m128d b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -1925,10 +1752,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -1957,10 +1781,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -1989,10 +1810,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -2021,10 +1839,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -2053,10 +1868,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_insertf128_ps (__m256 a, __m128 b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256 _mm256_insertf128_ps (__m256 a, __m128 b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -2085,10 +1897,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -2117,10 +1926,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -2149,10 +1955,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)
- VINSERTF128 ymm, ymm, xmm/m128, imm8
-
+ __m256i _mm256_insertf128_si256 (__m256i a, __m128i b, int imm8)VINSERTF128 ymm, ymm, xmm/m128, imm8
To be added.
To be added.
@@ -2205,10 +2008,7 @@
To be added.
-
- __m256i _mm256_load_si256 (__m256i const * mem_addr)
- VMOVDQA ymm, m256
-
+ __m256i _mm256_load_si256 (__m256i const * mem_addr)VMOVDQA ymm, m256
To be added.
To be added.
@@ -2236,10 +2036,7 @@
To be added.
-
- __m256d _mm256_load_pd (double const * mem_addr)
- VMOVAPD ymm, ymm/m256
-
+ __m256d _mm256_load_pd (double const * mem_addr)VMOVAPD ymm, ymm/m256
To be added.
To be added.
@@ -2267,10 +2064,7 @@
To be added.
-
- __m256i _mm256_load_si256 (__m256i const * mem_addr)
- VMOVDQA ymm, m256
-
+ __m256i _mm256_load_si256 (__m256i const * mem_addr)VMOVDQA ymm, m256
To be added.
To be added.
@@ -2298,10 +2092,7 @@
To be added.
-
- __m256i _mm256_load_si256 (__m256i const * mem_addr)
- VMOVDQA ymm, m256
-
+ __m256i _mm256_load_si256 (__m256i const * mem_addr)VMOVDQA ymm, m256
To be added.
To be added.
@@ -2329,10 +2120,7 @@
To be added.
-
- __m256i _mm256_load_si256 (__m256i const * mem_addr)
- VMOVDQA ymm, m256
-
+ __m256i _mm256_load_si256 (__m256i const * mem_addr)VMOVDQA ymm, m256
To be added.
To be added.
@@ -2360,10 +2148,7 @@
To be added.
-
- __m256i _mm256_load_si256 (__m256i const * mem_addr)
- VMOVDQA ymm, m256
-
+ __m256i _mm256_load_si256 (__m256i const * mem_addr)VMOVDQA ymm, m256
To be added.
To be added.
@@ -2391,10 +2176,7 @@
To be added.
-
- __m256 _mm256_load_ps (float const * mem_addr)
- VMOVAPS ymm, ymm/m256
-
+ __m256 _mm256_load_ps (float const * mem_addr)VMOVAPS ymm, ymm/m256
To be added.
To be added.
@@ -2422,10 +2204,7 @@
To be added.
-
- __m256i _mm256_load_si256 (__m256i const * mem_addr)
- VMOVDQA ymm, m256
-
+ __m256i _mm256_load_si256 (__m256i const * mem_addr)VMOVDQA ymm, m256
To be added.
To be added.
@@ -2453,10 +2232,7 @@
To be added.
-
- __m256i _mm256_load_si256 (__m256i const * mem_addr)
- VMOVDQA ymm, m256
-
+ __m256i _mm256_load_si256 (__m256i const * mem_addr)VMOVDQA ymm, m256
To be added.
To be added.
@@ -2484,10 +2260,7 @@
To be added.
-
- __m256i _mm256_load_si256 (__m256i const * mem_addr)
- VMOVDQA ymm, m256
-
+ __m256i _mm256_load_si256 (__m256i const * mem_addr)VMOVDQA ymm, m256
To be added.
To be added.
@@ -2515,10 +2288,7 @@
To be added.
-
- __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)
- VLDDQU ymm, m256
-
+ __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)VLDDQU ymm, m256
To be added.
To be added.
@@ -2546,10 +2316,7 @@
To be added.
-
- __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)
- VLDDQU ymm, m256
-
+ __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)VLDDQU ymm, m256
To be added.
To be added.
@@ -2577,10 +2344,7 @@
To be added.
-
- __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)
- VLDDQU ymm, m256
-
+ __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)VLDDQU ymm, m256
To be added.
To be added.
@@ -2608,10 +2372,7 @@
To be added.
-
- __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)
- VLDDQU ymm, m256
-
+ __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)VLDDQU ymm, m256
To be added.
To be added.
@@ -2639,10 +2400,7 @@
To be added.
-
- __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)
- VLDDQU ymm, m256
-
+ __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)VLDDQU ymm, m256
To be added.
To be added.
@@ -2670,10 +2428,7 @@
To be added.
-
- __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)
- VLDDQU ymm, m256
-
+ __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)VLDDQU ymm, m256
To be added.
To be added.
@@ -2701,10 +2456,7 @@
To be added.
-
- __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)
- VLDDQU ymm, m256
-
+ __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)VLDDQU ymm, m256
To be added.
To be added.
@@ -2732,10 +2484,7 @@
To be added.
-
- __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)
- VLDDQU ymm, m256
-
+ __m256i _mm256_lddqu_si256 (__m256i const * mem_addr)VLDDQU ymm, m256
To be added.
To be added.
@@ -2763,10 +2512,7 @@
To be added.
-
- __m256i _mm256_loadu_si256 (__m256i const * mem_addr)
- VMOVDQU ymm, m256
-
+ __m256i _mm256_loadu_si256 (__m256i const * mem_addr)VMOVDQU ymm, m256
To be added.
To be added.
@@ -2794,10 +2540,7 @@
To be added.
-
- __m256d _mm256_loadu_pd (double const * mem_addr)
- VMOVUPD ymm, ymm/m256
-
+ __m256d _mm256_loadu_pd (double const * mem_addr)VMOVUPD ymm, ymm/m256
To be added.
To be added.
@@ -2825,10 +2568,7 @@
To be added.
-
- __m256i _mm256_loadu_si256 (__m256i const * mem_addr)
- VMOVDQU ymm, m256
-
+ __m256i _mm256_loadu_si256 (__m256i const * mem_addr)VMOVDQU ymm, m256
To be added.
To be added.
@@ -2856,10 +2596,7 @@
To be added.
-
- __m256i _mm256_loadu_si256 (__m256i const * mem_addr)
- VMOVDQU ymm, m256
-
+ __m256i _mm256_loadu_si256 (__m256i const * mem_addr)VMOVDQU ymm, m256
To be added.
To be added.
@@ -2887,10 +2624,7 @@
To be added.
-
- __m256i _mm256_loadu_si256 (__m256i const * mem_addr)
- VMOVDQU ymm, m256
-
+ __m256i _mm256_loadu_si256 (__m256i const * mem_addr)VMOVDQU ymm, m256
To be added.
To be added.
@@ -2918,10 +2652,7 @@
To be added.
-
- __m256i _mm256_loadu_si256 (__m256i const * mem_addr)
- VMOVDQU ymm, m256
-
+ __m256i _mm256_loadu_si256 (__m256i const * mem_addr)VMOVDQU ymm, m256
To be added.
To be added.
@@ -2949,10 +2680,7 @@
To be added.
-
- __m256 _mm256_loadu_ps (float const * mem_addr)
- VMOVUPS ymm, ymm/m256
-
+ __m256 _mm256_loadu_ps (float const * mem_addr)VMOVUPS ymm, ymm/m256
To be added.
To be added.
@@ -2980,10 +2708,7 @@
To be added.
-
- __m256i _mm256_loadu_si256 (__m256i const * mem_addr)
- VMOVDQU ymm, m256
-
+ __m256i _mm256_loadu_si256 (__m256i const * mem_addr)VMOVDQU ymm, m256
To be added.
To be added.
@@ -3011,10 +2736,7 @@
To be added.
-
- __m256i _mm256_loadu_si256 (__m256i const * mem_addr)
- VMOVDQU ymm, m256
-
+ __m256i _mm256_loadu_si256 (__m256i const * mem_addr)VMOVDQU ymm, m256
To be added.
To be added.
@@ -3042,10 +2764,7 @@
To be added.
-
- __m256i _mm256_loadu_si256 (__m256i const * mem_addr)
- VMOVDQU ymm, m256
-
+ __m256i _mm256_loadu_si256 (__m256i const * mem_addr)VMOVDQU ymm, m256
To be added.
To be added.
@@ -3071,10 +2790,7 @@
To be added.
To be added.
-
- __m128d _mm_maskload_pd (double const * mem_addr, __m128i mask)
- VMASKMOVPD xmm, xmm, m128
-
+ __m128d _mm_maskload_pd (double const * mem_addr, __m128i mask)VMASKMOVPD xmm, xmm, m128
To be added.
To be added.
@@ -3100,10 +2816,7 @@
To be added.
To be added.
-
- __m256d _mm256_maskload_pd (double const * mem_addr, __m256i mask)
- VMASKMOVPD ymm, ymm, m256
-
+ __m256d _mm256_maskload_pd (double const * mem_addr, __m256i mask)VMASKMOVPD ymm, ymm, m256
To be added.
To be added.
@@ -3129,10 +2842,7 @@
To be added.
To be added.
-
- __m128 _mm_maskload_ps (float const * mem_addr, __m128i mask)
- VMASKMOVPS xmm, xmm, m128
-
+ __m128 _mm_maskload_ps (float const * mem_addr, __m128i mask)VMASKMOVPS xmm, xmm, m128
To be added.
To be added.
@@ -3158,10 +2868,7 @@
To be added.
To be added.
-
- __m256 _mm256_maskload_ps (float const * mem_addr, __m256i mask)
- VMASKMOVPS ymm, ymm, m256
-
+ __m256 _mm256_maskload_ps (float const * mem_addr, __m256i mask)VMASKMOVPS ymm, ymm, m256
To be added.
To be added.
@@ -3189,10 +2896,7 @@
To be added.
To be added.
To be added.
-
- void _mm_maskstore_pd (double * mem_addr, __m128i mask, __m128d a)
- VMASKMOVPD m128, xmm, xmm
-
+ void _mm_maskstore_pd (double * mem_addr, __m128i mask, __m128d a)VMASKMOVPD m128, xmm, xmm
To be added.
@@ -3219,10 +2923,7 @@
To be added.
To be added.
To be added.
-
- void _mm256_maskstore_pd (double * mem_addr, __m256i mask, __m256d a)
- VMASKMOVPD m256, ymm, ymm
-
+ void _mm256_maskstore_pd (double * mem_addr, __m256i mask, __m256d a)VMASKMOVPD m256, ymm, ymm
To be added.
@@ -3249,10 +2950,7 @@
To be added.
To be added.
To be added.
-
- void _mm_maskstore_ps (float * mem_addr, __m128i mask, __m128 a)
- VMASKMOVPS m128, xmm, xmm
-
+ void _mm_maskstore_ps (float * mem_addr, __m128i mask, __m128 a)VMASKMOVPS m128, xmm, xmm
To be added.
@@ -3279,10 +2977,7 @@
To be added.
To be added.
To be added.
-
- void _mm256_maskstore_ps (float * mem_addr, __m256i mask, __m256 a)
- VMASKMOVPS m256, ymm, ymm
-
+ void _mm256_maskstore_ps (float * mem_addr, __m256i mask, __m256 a)VMASKMOVPS m256, ymm, ymm
To be added.
@@ -3312,10 +3007,7 @@
To be added.
To be added.
-
- __m256d _mm256_max_pd (__m256d a, __m256d b)
- VMAXPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_max_pd (__m256d a, __m256d b)VMAXPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -3346,10 +3038,7 @@
To be added.
To be added.
-
- __m256 _mm256_max_ps (__m256 a, __m256 b)
- VMAXPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_max_ps (__m256 a, __m256 b)VMAXPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -3380,10 +3069,7 @@
To be added.
To be added.
-
- __m256d _mm256_min_pd (__m256d a, __m256d b)
- VMINPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_min_pd (__m256d a, __m256d b)VMINPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -3414,10 +3100,7 @@
To be added.
To be added.
-
- __m256 _mm256_min_ps (__m256 a, __m256 b)
- VMINPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_min_ps (__m256 a, __m256 b)VMINPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -3446,10 +3129,7 @@
To be added.
-
- int _mm256_movemask_pd (__m256d a)
- VMOVMSKPD reg, ymm
-
+ int _mm256_movemask_pd (__m256d a)VMOVMSKPD reg, ymm
To be added.
To be added.
@@ -3478,10 +3158,7 @@
To be added.
-
- int _mm256_movemask_ps (__m256 a)
- VMOVMSKPS reg, ymm
-
+ int _mm256_movemask_ps (__m256 a)VMOVMSKPS reg, ymm
To be added.
To be added.
@@ -3512,10 +3189,7 @@
To be added.
To be added.
-
- __m256d _mm256_mul_pd (__m256d a, __m256d b)
- VMULPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_mul_pd (__m256d a, __m256d b)VMULPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -3546,10 +3220,7 @@
To be added.
To be added.
-
- __m256 _mm256_mul_ps (__m256 a, __m256 b)
- VMULPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_mul_ps (__m256 a, __m256 b)VMULPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -3580,10 +3251,7 @@
To be added.
To be added.
-
- __m256d _mm256_or_pd (__m256d a, __m256d b)
- VORPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_or_pd (__m256d a, __m256d b)VORPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -3614,10 +3282,7 @@
To be added.
To be added.
-
- __m256 _mm256_or_ps (__m256 a, __m256 b)
- VORPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_or_ps (__m256 a, __m256 b)VORPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -3648,10 +3313,7 @@
To be added.
To be added.
-
- __m128d _mm_permute_pd (__m128d a, int imm8)
- VPERMILPD xmm, xmm, imm8
-
+ __m128d _mm_permute_pd (__m128d a, int imm8)VPERMILPD xmm, xmm, imm8
To be added.
To be added.
@@ -3682,10 +3344,7 @@
To be added.
To be added.
-
- __m128 _mm_permute_ps (__m128 a, int imm8)
- VPERMILPS xmm, xmm, imm8
-
+ __m128 _mm_permute_ps (__m128 a, int imm8)VPERMILPS xmm, xmm, imm8
To be added.
To be added.
@@ -3716,10 +3375,7 @@
To be added.
To be added.
-
- __m256d _mm256_permute_pd (__m256d a, int imm8)
- VPERMILPD ymm, ymm, imm8
-
+ __m256d _mm256_permute_pd (__m256d a, int imm8)VPERMILPD ymm, ymm, imm8
To be added.
To be added.
@@ -3750,10 +3406,7 @@
To be added.
To be added.
-
- __m256 _mm256_permute_ps (__m256 a, int imm8)
- VPERMILPS ymm, ymm, imm8
-
+ __m256 _mm256_permute_ps (__m256 a, int imm8)VPERMILPS ymm, ymm, imm8
To be added.
To be added.
@@ -3782,10 +3435,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -3814,10 +3464,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_permute2f128_pd (__m256d a, __m256d b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256d _mm256_permute2f128_pd (__m256d a, __m256d b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -3846,10 +3493,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -3878,10 +3522,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -3910,10 +3551,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -3942,10 +3580,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -3974,10 +3609,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_permute2f128_ps (__m256 a, __m256 b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256 _mm256_permute2f128_ps (__m256 a, __m256 b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -4006,10 +3638,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -4038,10 +3667,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -4070,10 +3696,7 @@
To be added.
To be added.
To be added.
-
- __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)
- VPERM2F128 ymm, ymm, ymm/m256, imm8
-
+ __m256i _mm256_permute2f128_si256 (__m256i a, __m256i b, int imm8)VPERM2F128 ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -4100,10 +3723,7 @@
To be added.
To be added.
-
- __m128d _mm_permutevar_pd (__m128d a, __m128i b)
- VPERMILPD xmm, xmm, xmm/m128
-
+ __m128d _mm_permutevar_pd (__m128d a, __m128i b)VPERMILPD xmm, xmm, xmm/m128
To be added.
To be added.
@@ -4130,10 +3750,7 @@
To be added.
To be added.
-
- __m128 _mm_permutevar_ps (__m128 a, __m128i b)
- VPERMILPS xmm, xmm, xmm/m128
-
+ __m128 _mm_permutevar_ps (__m128 a, __m128i b)VPERMILPS xmm, xmm, xmm/m128
To be added.
To be added.
@@ -4160,10 +3777,7 @@
To be added.
To be added.
-
- __m256d _mm256_permutevar_pd (__m256d a, __m256i b)
- VPERMILPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_permutevar_pd (__m256d a, __m256i b)VPERMILPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -4190,10 +3804,7 @@
To be added.
To be added.
-
- __m256 _mm256_permutevar_ps (__m256 a, __m256i b)
- VPERMILPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_permutevar_ps (__m256 a, __m256i b)VPERMILPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -4222,10 +3833,7 @@
To be added.
-
- __m256 _mm256_rcp_ps (__m256 a)
- VRCPPS ymm, ymm/m256
-
+ __m256 _mm256_rcp_ps (__m256 a)VRCPPS ymm, ymm/m256
To be added.
To be added.
@@ -4254,10 +3862,7 @@
To be added.
-
- __m256 _mm256_rsqrt_ps (__m256 a)
- VRSQRTPS ymm, ymm/m256
-
+ __m256 _mm256_rsqrt_ps (__m256 a)VRSQRTPS ymm, ymm/m256
To be added.
To be added.
@@ -4286,10 +3891,7 @@
To be added.
-
- __m256d _mm256_round_pd (__m256d a, _MM_FROUND_CUR_DIRECTION)
- VROUNDPD ymm, ymm/m256, imm8(4)
-
+ __m256d _mm256_round_pd (__m256d a, _MM_FROUND_CUR_DIRECTION)VROUNDPD ymm, ymm/m256, imm8(4)
To be added.
To be added.
@@ -4318,10 +3920,7 @@
To be added.
-
- __m256 _mm256_round_ps (__m256 a, _MM_FROUND_CUR_DIRECTION)
- VROUNDPS ymm, ymm/m256, imm8(4)
-
+ __m256 _mm256_round_ps (__m256 a, _MM_FROUND_CUR_DIRECTION)VROUNDPS ymm, ymm/m256, imm8(4)
To be added.
To be added.
@@ -4350,10 +3949,7 @@
To be added.
-
- __m256d _mm256_round_pd (__m256d a, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)
- VROUNDPD ymm, ymm/m256, imm8(8)
-
+ __m256d _mm256_round_pd (__m256d a, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)VROUNDPD ymm, ymm/m256, imm8(8)
To be added.
To be added.
@@ -4382,10 +3978,7 @@
To be added.
-
- __m256 _mm256_round_ps (__m256 a, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)
- VROUNDPS ymm, ymm/m256, imm8(8)
-
+ __m256 _mm256_round_ps (__m256 a, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)VROUNDPS ymm, ymm/m256, imm8(8)
To be added.
To be added.
@@ -4414,10 +4007,7 @@
To be added.
-
- __m256d _mm256_round_pd (__m256d a, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC)
- VROUNDPD ymm, ymm/m256, imm8(9)
-
+ __m256d _mm256_round_pd (__m256d a, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC)VROUNDPD ymm, ymm/m256, imm8(9)
To be added.
To be added.
@@ -4446,10 +4036,7 @@
To be added.
-
- __m256 _mm256_round_ps (__m256 a, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC)
- VROUNDPS ymm, ymm/m256, imm8(9)
-
+ __m256 _mm256_round_ps (__m256 a, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC)VROUNDPS ymm, ymm/m256, imm8(9)
To be added.
To be added.
@@ -4478,10 +4065,7 @@
To be added.
-
- __m256d _mm256_round_pd (__m256d a, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC)
- VROUNDPD ymm, ymm/m256, imm8(10)
-
+ __m256d _mm256_round_pd (__m256d a, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC)VROUNDPD ymm, ymm/m256, imm8(10)
To be added.
To be added.
@@ -4510,10 +4094,7 @@
To be added.
-
- __m256 _mm256_round_ps (__m256 a, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC)
- VROUNDPS ymm, ymm/m256, imm8(10)
-
+ __m256 _mm256_round_ps (__m256 a, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC)VROUNDPS ymm, ymm/m256, imm8(10)
To be added.
To be added.
@@ -4542,10 +4123,7 @@
To be added.
-
- __m256d _mm256_round_pd (__m256d a, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC)
- VROUNDPD ymm, ymm/m256, imm8(11)
-
+ __m256d _mm256_round_pd (__m256d a, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC)VROUNDPD ymm, ymm/m256, imm8(11)
To be added.
To be added.
@@ -4574,10 +4152,7 @@
To be added.
-
- __m256 _mm256_round_ps (__m256 a, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC)
- VROUNDPS ymm, ymm/m256, imm8(11)
-
+ __m256 _mm256_round_ps (__m256 a, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC)VROUNDPS ymm, ymm/m256, imm8(11)
To be added.
To be added.
@@ -4610,10 +4185,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_shuffle_pd (__m256d a, __m256d b, const int imm8)
- VSHUFPD ymm, ymm, ymm/m256, imm8
-
+ __m256d _mm256_shuffle_pd (__m256d a, __m256d b, const int imm8)VSHUFPD ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -4646,10 +4218,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_shuffle_ps (__m256 a, __m256 b, const int imm8)
- VSHUFPS ymm, ymm, ymm/m256, imm8
-
+ __m256 _mm256_shuffle_ps (__m256 a, __m256 b, const int imm8)VSHUFPS ymm, ymm, ymm/m256, imm8
To be added.
To be added.
@@ -4678,10 +4247,7 @@
To be added.
-
- __m256d _mm256_sqrt_pd (__m256d a)
- VSQRTPD ymm, ymm/m256
-
+ __m256d _mm256_sqrt_pd (__m256d a)VSQRTPD ymm, ymm/m256
To be added.
To be added.
@@ -4710,10 +4276,7 @@
To be added.
-
- __m256 _mm256_sqrt_ps (__m256 a)
- VSQRTPS ymm, ymm/m256
-
+ __m256 _mm256_sqrt_ps (__m256 a)VSQRTPS ymm, ymm/m256
To be added.
To be added.
@@ -4743,10 +4306,7 @@
To be added.
To be added.
-
- void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)
- MOVDQU m256, ymm
-
+ void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)MOVDQU m256, ymm
To be added.
@@ -4775,10 +4335,7 @@
To be added.
To be added.
-
- void _mm256_storeu_pd (double * mem_addr, __m256d a)
- MOVUPD m256, ymm
-
+ void _mm256_storeu_pd (double * mem_addr, __m256d a)MOVUPD m256, ymm
To be added.
@@ -4807,10 +4364,7 @@
To be added.
To be added.
-
- void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)
- MOVDQU m256, ymm
-
+ void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)MOVDQU m256, ymm
To be added.
@@ -4839,10 +4393,7 @@
To be added.
To be added.
-
- void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)
- MOVDQU m256, ymm
-
+ void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)MOVDQU m256, ymm
To be added.
@@ -4871,10 +4422,7 @@
To be added.
To be added.
-
- void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)
- MOVDQU m256, ymm
-
+ void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)MOVDQU m256, ymm
To be added.
@@ -4903,10 +4451,7 @@
To be added.
To be added.
-
- void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)
- MOVDQU m256, ymm
-
+ void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)MOVDQU m256, ymm
To be added.
@@ -4935,10 +4480,7 @@
To be added.
To be added.
-
- void _mm256_storeu_ps (float * mem_addr, __m256 a)
- MOVUPS m256, ymm
-
+ void _mm256_storeu_ps (float * mem_addr, __m256 a)MOVUPS m256, ymm
To be added.
@@ -4967,10 +4509,7 @@
To be added.
To be added.
-
- void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)
- MOVDQU m256, ymm
-
+ void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)MOVDQU m256, ymm
To be added.
@@ -4999,10 +4538,7 @@
To be added.
To be added.
-
- void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)
- MOVDQU m256, ymm
-
+ void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)MOVDQU m256, ymm
To be added.
@@ -5031,10 +4567,7 @@
To be added.
To be added.
-
- void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)
- MOVDQU m256, ymm
-
+ void _mm256_storeu_si256 (__m256i * mem_addr, __m256i a)MOVDQU m256, ymm
To be added.
@@ -5063,10 +4596,7 @@
To be added.
To be added.
-
- void _mm256_store_si256 (__m256i * mem_addr, __m256i a)
- MOVDQA m256, ymm
-
+ void _mm256_store_si256 (__m256i * mem_addr, __m256i a)MOVDQA m256, ymm
To be added.
@@ -5095,10 +4625,7 @@
To be added.
To be added.
-
- void _mm256_store_pd (double * mem_addr, __m256d a)
- VMOVAPD m256, ymm
-
+ void _mm256_store_pd (double * mem_addr, __m256d a)VMOVAPD m256, ymm
To be added.
@@ -5127,10 +4654,7 @@
To be added.
To be added.
-
- void _mm256_store_si256 (__m256i * mem_addr, __m256i a)
- MOVDQA m256, ymm
-
+ void _mm256_store_si256 (__m256i * mem_addr, __m256i a)MOVDQA m256, ymm
To be added.
@@ -5159,10 +4683,7 @@
To be added.
To be added.
-
- void _mm256_store_si256 (__m256i * mem_addr, __m256i a)
- MOVDQA m256, ymm
-
+ void _mm256_store_si256 (__m256i * mem_addr, __m256i a)MOVDQA m256, ymm
To be added.
@@ -5191,10 +4712,7 @@
To be added.
To be added.
-
- void _mm256_store_si256 (__m256i * mem_addr, __m256i a)
- MOVDQA m256, ymm
-
+ void _mm256_store_si256 (__m256i * mem_addr, __m256i a)MOVDQA m256, ymm
To be added.
@@ -5223,10 +4741,7 @@
To be added.
To be added.
-
- void _mm256_store_si256 (__m256i * mem_addr, __m256i a)
- MOVDQA m256, ymm
-
+ void _mm256_store_si256 (__m256i * mem_addr, __m256i a)MOVDQA m256, ymm
To be added.
@@ -5255,10 +4770,7 @@
To be added.
To be added.
-
- void _mm256_store_ps (float * mem_addr, __m256 a)
- VMOVAPS m256, ymm
-
+ void _mm256_store_ps (float * mem_addr, __m256 a)VMOVAPS m256, ymm
To be added.
@@ -5287,10 +4799,7 @@
To be added.
To be added.
-
- void _mm256_store_si256 (__m256i * mem_addr, __m256i a)
- MOVDQA m256, ymm
-
+ void _mm256_store_si256 (__m256i * mem_addr, __m256i a)MOVDQA m256, ymm
To be added.
@@ -5319,10 +4828,7 @@
To be added.
To be added.
-
- void _mm256_store_si256 (__m256i * mem_addr, __m256i a)
- MOVDQA m256, ymm
-
+ void _mm256_store_si256 (__m256i * mem_addr, __m256i a)MOVDQA m256, ymm
To be added.
@@ -5351,10 +4857,7 @@
To be added.
To be added.
-
- void _mm256_store_si256 (__m256i * mem_addr, __m256i a)
- MOVDQA m256, ymm
-
+ void _mm256_store_si256 (__m256i * mem_addr, __m256i a)MOVDQA m256, ymm
To be added.
@@ -5383,10 +4886,7 @@
To be added.
To be added.
-
- void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)
- VMOVNTDQ m256, ymm
-
+ void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)VMOVNTDQ m256, ymm
To be added.
@@ -5415,10 +4915,7 @@
To be added.
To be added.
-
- void _mm256_stream_pd (double * mem_addr, __m256d a)
- MOVNTPD m256, ymm
-
+ void _mm256_stream_pd (double * mem_addr, __m256d a)MOVNTPD m256, ymm
To be added.
@@ -5447,10 +4944,7 @@
To be added.
To be added.
-
- void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)
- VMOVNTDQ m256, ymm
-
+ void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)VMOVNTDQ m256, ymm
To be added.
@@ -5479,10 +4973,7 @@
To be added.
To be added.
-
- void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)
- VMOVNTDQ m256, ymm
-
+ void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)VMOVNTDQ m256, ymm
To be added.
@@ -5511,10 +5002,7 @@
To be added.
To be added.
-
- void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)
- VMOVNTDQ m256, ymm
-
+ void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)VMOVNTDQ m256, ymm
To be added.
@@ -5543,10 +5031,7 @@
To be added.
To be added.
-
- void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)
- VMOVNTDQ m256, ymm
-
+ void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)VMOVNTDQ m256, ymm
To be added.
@@ -5575,10 +5060,7 @@
To be added.
To be added.
-
- void _mm256_stream_ps (float * mem_addr, __m256 a)
- MOVNTPS m256, ymm
-
+ void _mm256_stream_ps (float * mem_addr, __m256 a)MOVNTPS m256, ymm
To be added.
@@ -5607,10 +5089,7 @@
To be added.
To be added.
-
- void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)
- VMOVNTDQ m256, ymm
-
+ void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)VMOVNTDQ m256, ymm
To be added.
@@ -5639,10 +5118,7 @@
To be added.
To be added.
-
- void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)
- VMOVNTDQ m256, ymm
-
+ void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)VMOVNTDQ m256, ymm
To be added.
@@ -5671,10 +5147,7 @@
To be added.
To be added.
-
- void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)
- VMOVNTDQ m256, ymm
-
+ void _mm256_stream_si256 (__m256i * mem_addr, __m256i a)VMOVNTDQ m256, ymm
To be added.
@@ -5704,10 +5177,7 @@
To be added.
To be added.
-
- __m256d _mm256_sub_pd (__m256d a, __m256d b)
- VSUBPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_sub_pd (__m256d a, __m256d b)VSUBPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -5738,10 +5208,7 @@
To be added.
To be added.
-
- __m256 _mm256_sub_ps (__m256 a, __m256 b)
- VSUBPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_sub_ps (__m256 a, __m256 b)VSUBPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -5772,10 +5239,7 @@
To be added.
To be added.
-
- int _mm_testc_pd (__m128d a, __m128d b)
- VTESTPD xmm, xmm/m128
-
+ int _mm_testc_pd (__m128d a, __m128d b)VTESTPD xmm, xmm/m128
To be added.
To be added.
@@ -5806,10 +5270,7 @@
To be added.
To be added.
-
- int _mm_testc_ps (__m128 a, __m128 b)
- VTESTPS xmm, xmm/m128
-
+ int _mm_testc_ps (__m128 a, __m128 b)VTESTPS xmm, xmm/m128
To be added.
To be added.
@@ -5836,10 +5297,7 @@
To be added.
To be added.
-
- int _mm256_testc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -5866,10 +5324,7 @@
To be added.
To be added.
-
- int _mm256_testc_pd (__m256d a, __m256d b)
- VTESTPS ymm, ymm/m256
-
+ int _mm256_testc_pd (__m256d a, __m256d b)VTESTPD ymm, ymm/m256
To be added.
To be added.
@@ -5896,10 +5351,7 @@
To be added.
To be added.
-
- int _mm256_testc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -5926,10 +5378,7 @@
To be added.
To be added.
-
- int _mm256_testc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -5956,10 +5405,7 @@
To be added.
To be added.
-
- int _mm256_testc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -5986,10 +5432,7 @@
To be added.
To be added.
-
- int _mm256_testc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6016,10 +5459,7 @@
To be added.
To be added.
-
- int _mm256_testc_ps (__m256 a, __m256 b)
- VTESTPS ymm, ymm/m256
-
+ int _mm256_testc_ps (__m256 a, __m256 b)VTESTPS ymm, ymm/m256
To be added.
To be added.
@@ -6046,10 +5486,7 @@
To be added.
To be added.
-
- int _mm256_testc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6076,10 +5513,7 @@
To be added.
To be added.
-
- int _mm256_testc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6106,10 +5540,7 @@
To be added.
To be added.
-
- int _mm256_testc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6140,10 +5571,7 @@
To be added.
To be added.
-
- int _mm_testnzc_pd (__m128d a, __m128d b)
- VTESTPD xmm, xmm/m128
-
+ int _mm_testnzc_pd (__m128d a, __m128d b)VTESTPD xmm, xmm/m128
To be added.
To be added.
@@ -6174,10 +5602,7 @@
To be added.
To be added.
-
- int _mm_testnzc_ps (__m128 a, __m128 b)
- VTESTPS xmm, xmm/m128
-
+ int _mm_testnzc_ps (__m128 a, __m128 b)VTESTPS xmm, xmm/m128
To be added.
To be added.
@@ -6204,10 +5629,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testnzc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6234,10 +5656,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_pd (__m256d a, __m256d b)
- VTESTPD ymm, ymm/m256
-
+ int _mm256_testnzc_pd (__m256d a, __m256d b)VTESTPD ymm, ymm/m256
To be added.
To be added.
@@ -6264,10 +5683,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testnzc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6294,10 +5710,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testnzc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6324,10 +5737,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testnzc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6354,10 +5764,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testnzc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6384,10 +5791,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_ps (__m256 a, __m256 b)
- VTESTPS ymm, ymm/m256
-
+ int _mm256_testnzc_ps (__m256 a, __m256 b)VTESTPS ymm, ymm/m256
To be added.
To be added.
@@ -6414,10 +5818,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testnzc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6444,10 +5845,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testnzc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6474,10 +5872,7 @@
To be added.
To be added.
-
- int _mm256_testnzc_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testnzc_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6508,10 +5903,7 @@
To be added.
To be added.
-
- int _mm_testz_pd (__m128d a, __m128d b)
- VTESTPD xmm, xmm/m128
-
+ int _mm_testz_pd (__m128d a, __m128d b)VTESTPD xmm, xmm/m128
To be added.
To be added.
@@ -6542,10 +5934,7 @@
To be added.
To be added.
-
- int _mm_testz_ps (__m128 a, __m128 b)
- VTESTPS xmm, xmm/m128
-
+ int _mm_testz_ps (__m128 a, __m128 b)VTESTPS xmm, xmm/m128
To be added.
To be added.
@@ -6572,10 +5961,7 @@
To be added.
To be added.
-
- int _mm256_testz_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testz_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6602,10 +5988,7 @@
To be added.
To be added.
-
- int _mm256_testz_pd (__m256d a, __m256d b)
- VTESTPD ymm, ymm/m256
-
+ int _mm256_testz_pd (__m256d a, __m256d b)VTESTPD ymm, ymm/m256
To be added.
To be added.
@@ -6632,10 +6015,7 @@
To be added.
To be added.
-
- int _mm256_testz_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testz_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6662,10 +6042,7 @@
To be added.
To be added.
-
- int _mm256_testz_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testz_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6692,10 +6069,7 @@
To be added.
To be added.
-
- int _mm256_testz_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testz_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6722,10 +6096,7 @@
To be added.
To be added.
-
- int _mm256_testz_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testz_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6752,10 +6123,7 @@
To be added.
To be added.
-
- int _mm256_testz_ps (__m256 a, __m256 b)
- VTESTPS ymm, ymm/m256
-
+ int _mm256_testz_ps (__m256 a, __m256 b)VTESTPS ymm, ymm/m256
To be added.
To be added.
@@ -6782,10 +6150,7 @@
To be added.
To be added.
-
- int _mm256_testz_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testz_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6812,10 +6177,7 @@
To be added.
To be added.
-
- int _mm256_testz_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testz_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6842,10 +6204,7 @@
To be added.
To be added.
-
- int _mm256_testz_si256 (__m256i a, __m256i b)
- VPTEST ymm, ymm/m256
-
+ int _mm256_testz_si256 (__m256i a, __m256i b)VPTEST ymm, ymm/m256
To be added.
To be added.
@@ -6876,10 +6235,7 @@
To be added.
To be added.
-
- __m256d _mm256_unpackhi_pd (__m256d a, __m256d b)
- VUNPCKHPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_unpackhi_pd (__m256d a, __m256d b)VUNPCKHPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -6910,10 +6266,7 @@
To be added.
To be added.
-
- __m256 _mm256_unpackhi_ps (__m256 a, __m256 b)
- VUNPCKHPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_unpackhi_ps (__m256 a, __m256 b)VUNPCKHPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -6944,10 +6297,7 @@
To be added.
To be added.
-
- __m256d _mm256_unpacklo_pd (__m256d a, __m256d b)
- VUNPCKLPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_unpacklo_pd (__m256d a, __m256d b)VUNPCKLPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -6978,10 +6328,7 @@
To be added.
To be added.
-
- __m256 _mm256_unpacklo_ps (__m256 a, __m256 b)
- VUNPCKLPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_unpacklo_ps (__m256 a, __m256 b)VUNPCKLPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -7012,10 +6359,7 @@
To be added.
To be added.
-
- __m256d _mm256_xor_pd (__m256d a, __m256d b)
- VXORPS ymm, ymm, ymm/m256
-
+ __m256d _mm256_xor_pd (__m256d a, __m256d b)VXORPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -7046,10 +6390,7 @@
To be added.
To be added.
-
- __m256 _mm256_xor_ps (__m256 a, __m256 b)
- VXORPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_xor_ps (__m256 a, __m256 b)VXORPS ymm, ymm, ymm/m256
To be added.
To be added.
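Reviewer note: for the `Avx` class touched above, a minimal C# sketch of the `Vector256<double>` arithmetic wrappers documented here (`_mm256_add_pd` / VADDPD and `_mm256_mul_pd` / VMULPD); the operand values are placeholders chosen only to make the element-wise results easy to read.

```csharp
using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

class AvxSketch
{
    static void Main()
    {
        if (!Avx.IsSupported)
        {
            Console.WriteLine("AVX is not available on this CPU.");
            return;
        }

        // Two 256-bit vectors holding four doubles each.
        Vector256<double> a = Vector256.Create(1.0, 2.0, 3.0, 4.0);
        Vector256<double> b = Vector256.Create(10.0, 20.0, 30.0, 40.0);

        // Avx.Add corresponds to _mm256_add_pd / VADDPD: element-wise addition.
        Vector256<double> sum = Avx.Add(a, b);

        // Avx.Multiply corresponds to _mm256_mul_pd / VMULPD: element-wise multiplication.
        Vector256<double> product = Avx.Multiply(a, b);

        Console.WriteLine(sum);      // <11, 22, 33, 44>
        Console.WriteLine(product);  // <10, 40, 90, 160>
    }
}
```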
diff --git a/xml/System.Runtime.Intrinsics.X86/Bmi1+X64.xml b/xml/System.Runtime.Intrinsics.X86/Bmi1+X64.xml
index 5132dfe6a6c..f0fb2bf045c 100644
--- a/xml/System.Runtime.Intrinsics.X86/Bmi1+X64.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Bmi1+X64.xml
@@ -40,13 +40,17 @@
To be added.
To be added.
-
- unsigned __int64 _andn_u64 (unsigned __int64 a, unsigned __int64 b)
- ANDN r64a, r64b, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _andn_u64 (unsigned __int64 a, unsigned __int64 b)ANDN r64a, r64b, reg/m64
To be added.
- To be added.
+
+
+
@@ -71,13 +75,17 @@
To be added.
To be added.
-
- unsigned __int64 _bextr2_u64 (unsigned __int64 a, unsigned __int64 control)
- BEXTR r64a, reg/m64, r64b
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _bextr2_u64 (unsigned __int64 a, unsigned __int64 control)BEXTR r64a, reg/m64, r64b
To be added.
- To be added.
+
+
+
@@ -104,13 +112,17 @@
To be added.
To be added.
To be added.
-
- unsigned __int64 _bextr_u64 (unsigned __int64 a, unsigned int start, unsigned int len)
- BEXTR r64a, reg/m64, r64b
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _bextr_u64 (unsigned __int64 a, unsigned int start, unsigned int len)BEXTR r64a, reg/m64, r64b
To be added.
- To be added.
+
+
+
@@ -133,13 +145,17 @@
To be added.
-
- unsigned __int64 _blsi_u64 (unsigned __int64 a)
- BLSI reg, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _blsi_u64 (unsigned __int64 a)BLSI reg, reg/m64
To be added.
- To be added.
+
+
+
@@ -162,13 +178,17 @@
To be added.
-
- unsigned __int64 _blsmsk_u64 (unsigned __int64 a)
- BLSMSK reg, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _blsmsk_u64 (unsigned __int64 a)BLSMSK reg, reg/m64
To be added.
- To be added.
+
+
+
@@ -212,13 +232,17 @@
To be added.
-
- unsigned __int64 _blsr_u64 (unsigned __int64 a)
- BLSR reg, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _blsr_u64 (unsigned __int64 a)BLSR reg, reg/m64
To be added.
- To be added.
+
+
+
@@ -241,13 +265,17 @@
To be added.
-
- __int64 _mm_tzcnt_64 (unsigned __int64 a)
- TZCNT reg, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_tzcnt_64 (unsigned __int64 a)TZCNT reg, reg/m64
To be added.
- To be added.
+
+
+
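Reviewer note: the remarks removed above stated that these intrinsics are only available in 64-bit processes, which is still conveyed by the `Bmi1.X64.IsSupported` guard. A minimal C# sketch, with arbitrary sample values, of the 64-bit TZCNT and ANDN wrappers documented in this file:

```csharp
using System;
using System.Runtime.Intrinsics.X86;

class Bmi1X64Sketch
{
    static void Main()
    {
        if (!Bmi1.X64.IsSupported)
        {
            Console.WriteLine("64-bit BMI1 is not available (requires a 64-bit process on a BMI1-capable CPU).");
            return;
        }

        ulong value = 0b1011_0000UL;

        // Bmi1.X64.TrailingZeroCount corresponds to _mm_tzcnt_64 / TZCNT: count of trailing zero bits.
        ulong trailingZeros = Bmi1.X64.TrailingZeroCount(value);      // 4

        // Bmi1.X64.AndNot corresponds to _andn_u64 / ANDN: computes (~left) & right,
        // here clearing the low four bits of 'value'.
        ulong masked = Bmi1.X64.AndNot(0b1111UL, value);

        Console.WriteLine(trailingZeros);
        Console.WriteLine(masked);
    }
}
```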
diff --git a/xml/System.Runtime.Intrinsics.X86/Bmi1.xml b/xml/System.Runtime.Intrinsics.X86/Bmi1.xml
index 9c482f3be75..009377a94e5 100644
--- a/xml/System.Runtime.Intrinsics.X86/Bmi1.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Bmi1.xml
@@ -19,9 +19,7 @@
-
- This class provides access to Intel BMI1 hardware instructions via intrinsics
-
+ This class provides access to Intel BMI1 hardware instructions via intrinsics.
To be added.
@@ -47,10 +45,7 @@
To be added.
To be added.
-
- unsigned int _andn_u32 (unsigned int a, unsigned int b)
- ANDN r32a, r32b, reg/m32
-
+ unsigned int _andn_u32 (unsigned int a, unsigned int b)ANDN r32a, r32b, reg/m32
To be added.
To be added.
@@ -77,10 +72,7 @@
To be added.
To be added.
-
- unsigned int _bextr2_u32 (unsigned int a, unsigned int control)
- BEXTR r32a, reg/m32, r32b
-
+ unsigned int _bextr2_u32 (unsigned int a, unsigned int control)BEXTR r32a, reg/m32, r32b
To be added.
To be added.
@@ -109,10 +101,7 @@
To be added.
To be added.
To be added.
-
- unsigned int _bextr_u32 (unsigned int a, unsigned int start, unsigned int len)
- BEXTR r32a, reg/m32, r32b
-
+ unsigned int _bextr_u32 (unsigned int a, unsigned int start, unsigned int len)BEXTR r32a, reg/m32, r32b
To be added.
To be added.
@@ -137,10 +126,7 @@
To be added.
-
- unsigned int _blsi_u32 (unsigned int a)
- BLSI reg, reg/m32
-
+ unsigned int _blsi_u32 (unsigned int a)BLSI reg, reg/m32
To be added.
To be added.
@@ -165,10 +151,7 @@
To be added.
-
- unsigned int _blsmsk_u32 (unsigned int a)
- BLSMSK reg, reg/m32
-
+ unsigned int _blsmsk_u32 (unsigned int a)BLSMSK reg, reg/m32
To be added.
To be added.
@@ -214,10 +197,7 @@
To be added.
-
- unsigned int _blsr_u32 (unsigned int a)
- BLSR reg, reg/m32
-
+ unsigned int _blsr_u32 (unsigned int a)BLSR reg, reg/m32
To be added.
To be added.
@@ -242,10 +222,7 @@
To be added.
-
- int _mm_tzcnt_32 (unsigned int a)
- TZCNT reg, reg/m32
-
+ int _mm_tzcnt_32 (unsigned int a)TZCNT reg, reg/m32
To be added.
To be added.
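A short C sketch of the 32-bit BEXTR entry above next to the equivalent shift-and-mask, to make the start/len semantics concrete. The helper names are illustrative, and the portable variant assumes len < 32.

    #include <immintrin.h>  /* compile with -mbmi */

    /* Extract 'len' bits of 'value' starting at bit 'start' with the
       _bextr_u32 intrinsic documented above. */
    static unsigned extract_field(unsigned value, unsigned start, unsigned len)
    {
        return _bextr_u32(value, start, len);   /* BEXTR r32a, reg/m32, r32b */
    }

    /* Same result with plain shifts, valid for len < 32. */
    static unsigned extract_field_portable(unsigned value, unsigned start, unsigned len)
    {
        return (value >> start) & ((1u << len) - 1u);
    }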
diff --git a/xml/System.Runtime.Intrinsics.X86/Bmi2+X64.xml b/xml/System.Runtime.Intrinsics.X86/Bmi2+X64.xml
index f9026654b7c..0bc544101c8 100644
--- a/xml/System.Runtime.Intrinsics.X86/Bmi2+X64.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Bmi2+X64.xml
@@ -61,14 +61,18 @@
To be added.
To be added.
-
- unsigned __int64 _mulx_u64 (unsigned __int64 a, unsigned __int64 b, unsigned __int64* hi)
- MULX r64a, r64b, reg/m64
- The above native signature does not directly correspond to the managed signature.
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _mulx_u64 (unsigned __int64 a, unsigned __int64 b, unsigned __int64* hi)MULX r64a, r64b, reg/m64
To be added.
- To be added.
+
+
+
@@ -94,14 +98,18 @@
To be added.
To be added.
To be added.
-
- unsigned __int64 _mulx_u64 (unsigned __int64 a, unsigned __int64 b, unsigned __int64* hi)
- MULX r64a, r64b, reg/m64
- The above native signature does not directly correspond to the managed signature.
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _mulx_u64 (unsigned __int64 a, unsigned __int64 b, unsigned __int64* hi)MULX r64a, r64b, reg/m64
To be added.
- To be added.
+
+
+
@@ -126,13 +134,17 @@
To be added.
To be added.
-
- unsigned __int64 _pdep_u64 (unsigned __int64 a, unsigned __int64 mask)
- PDEP r64a, r64b, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _pdep_u64 (unsigned __int64 a, unsigned __int64 mask)PDEP r64a, r64b, reg/m64
To be added.
- To be added.
+
+
+
@@ -157,13 +169,17 @@
To be added.
To be added.
-
- unsigned __int64 _pext_u64 (unsigned __int64 a, unsigned __int64 mask)
- PEXT r64a, r64b, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _pext_u64 (unsigned __int64 a, unsigned __int64 mask)PEXT r64a, r64b, reg/m64
To be added.
- To be added.
+
+
+
@@ -188,13 +204,17 @@
To be added.
To be added.
-
- unsigned __int64 _bzhi_u64 (unsigned __int64 a, unsigned int index)
- BZHI r64a, reg/m32, r64b
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _bzhi_u64 (unsigned __int64 a, unsigned int index)BZHI r64a, reg/m32, r64b
To be added.
- To be added.
+
+
+
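The MULX entries above return the low half of the product and write the high half through a pointer; a minimal C sketch, with the wrapper name being illustrative only.

    #include <immintrin.h>  /* compile with -mbmi2; 64-bit process only */

    /* Full 64x64 -> 128-bit unsigned multiply via the _mulx_u64 intrinsic
       documented above: the low 64 bits are returned, the high 64 bits come
       back through 'hi'. */
    static unsigned long long mul_full_u64(unsigned long long a,
                                           unsigned long long b,
                                           unsigned long long *hi)
    {
        return _mulx_u64(a, b, hi);  /* MULX r64a, r64b, reg/m64 */
    }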
diff --git a/xml/System.Runtime.Intrinsics.X86/Bmi2.xml b/xml/System.Runtime.Intrinsics.X86/Bmi2.xml
index ab9e5cbb8cd..66c1e96a4ae 100644
--- a/xml/System.Runtime.Intrinsics.X86/Bmi2.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Bmi2.xml
@@ -19,9 +19,7 @@
-
- This class provides access to Intel BMI2 hardware instructions via intrinsics
-
+ This class provides access to Intel BMI2 hardware instructions via intrinsics.
To be added.
@@ -68,13 +66,17 @@
To be added.
To be added.
-
- unsigned int _mulx_u32 (unsigned int a, unsigned int b, unsigned int* hi)
- MULX r32a, r32b, reg/m32
- The above native signature does not directly correspond to the managed signature.
-
+ unsigned int _mulx_u32 (unsigned int a, unsigned int b, unsigned int* hi)MULX r32a, r32b, reg/m32
To be added.
- To be added.
+
+
+
@@ -100,13 +102,17 @@
To be added.
To be added.
To be added.
-
- unsigned int _mulx_u32 (unsigned int a, unsigned int b, unsigned int* hi)
- MULX r32a, r32b, reg/m32
- The above native signature does not directly correspond to the managed signature.
-
+ unsigned int _mulx_u32 (unsigned int a, unsigned int b, unsigned int* hi)MULX r32a, r32b, reg/m32
To be added.
- To be added.
+
+
+
@@ -131,10 +137,7 @@
To be added.
To be added.
-
- unsigned int _pdep_u32 (unsigned int a, unsigned int mask)
- PDEP r32a, r32b, reg/m32
-
+ unsigned int _pdep_u32 (unsigned int a, unsigned int mask)PDEP r32a, r32b, reg/m32
To be added.
To be added.
@@ -161,10 +164,7 @@
To be added.
To be added.
-
- unsigned int _pext_u32 (unsigned int a, unsigned int mask)
- PEXT r32a, r32b, reg/m32
-
+ unsigned int _pext_u32 (unsigned int a, unsigned int mask)PEXT r32a, r32b, reg/m32
To be added.
To be added.
@@ -191,10 +191,7 @@
To be added.
To be added.
-
- unsigned int _bzhi_u32 (unsigned int a, unsigned int index)
- BZHI r32a, reg/m32, r32b
-
+ unsigned int _bzhi_u32 (unsigned int a, unsigned int index)BZHI r32a, reg/m32, r32b
To be added.
To be added.
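PDEP and PEXT (documented above) scatter and gather bits according to a mask; a classic use is Morton (Z-order) encoding. A C sketch under that assumption; the masks and helper names are illustrative.

    #include <immintrin.h>  /* compile with -mbmi2 */

    /* Interleave the low 16 bits of x and y into a 32-bit Morton index with
       _pdep_u32, and split it again with _pext_u32. */
    static unsigned morton_encode(unsigned x, unsigned y)
    {
        return _pdep_u32(x, 0x55555555u)    /* x bits go to the even positions */
             | _pdep_u32(y, 0xAAAAAAAAu);   /* y bits go to the odd positions  */
    }

    static void morton_decode(unsigned code, unsigned *x, unsigned *y)
    {
        *x = _pext_u32(code, 0x55555555u);  /* gather the even positions */
        *y = _pext_u32(code, 0xAAAAAAAAu);  /* gather the odd positions  */
    }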
diff --git a/xml/System.Runtime.Intrinsics.X86/Fma.xml b/xml/System.Runtime.Intrinsics.X86/Fma.xml
index 499407a361d..8adf7bbb354 100644
--- a/xml/System.Runtime.Intrinsics.X86/Fma.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Fma.xml
@@ -19,9 +19,7 @@
-
- This class provides access to Intel FMA hardware instructions via intrinsics
-
+ This class provides access to Intel FMA hardware instructions via intrinsics.
To be added.
@@ -70,10 +68,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fmadd_pd (__m128d a, __m128d b, __m128d c)
- VFMADDPD xmm, xmm, xmm/m128
-
+ __m128d _mm_fmadd_pd (__m128d a, __m128d b, __m128d c)VFMADDPD xmm, xmm, xmm/m128
To be added.
To be added.
@@ -102,10 +97,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fmadd_ps (__m128 a, __m128 b, __m128 c)
- VFMADDPS xmm, xmm, xmm/m128
-
+ __m128 _mm_fmadd_ps (__m128 a, __m128 b, __m128 c)VFMADDPS xmm, xmm, xmm/m128
To be added.
To be added.
@@ -134,10 +126,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_fmadd_pd (__m256d a, __m256d b, __m256d c)
- VFMADDPS ymm, ymm, ymm/m256
-
+ __m256d _mm256_fmadd_pd (__m256d a, __m256d b, __m256d c)VFMADDPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -166,10 +155,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_fmadd_ps (__m256 a, __m256 b, __m256 c)
- VFMADDPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_fmadd_ps (__m256 a, __m256 b, __m256 c)VFMADDPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -198,10 +184,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fnmadd_pd (__m128d a, __m128d b, __m128d c)
- VFNMADDPD xmm, xmm, xmm/m128
-
+ __m128d _mm_fnmadd_pd (__m128d a, __m128d b, __m128d c)VFNMADDPD xmm, xmm, xmm/m128
To be added.
To be added.
@@ -230,10 +213,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fnmadd_ps (__m128 a, __m128 b, __m128 c)
- VFNMADDPS xmm, xmm, xmm/m128
-
+ __m128 _mm_fnmadd_ps (__m128 a, __m128 b, __m128 c)VFNMADDPS xmm, xmm, xmm/m128
To be added.
To be added.
@@ -262,10 +242,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_fnmadd_pd (__m256d a, __m256d b, __m256d c)
- VFNMADDPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_fnmadd_pd (__m256d a, __m256d b, __m256d c)VFNMADDPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -294,10 +271,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_fnmadd_ps (__m256 a, __m256 b, __m256 c)
- VFNMADDPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_fnmadd_ps (__m256 a, __m256 b, __m256 c)VFNMADDPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -326,10 +300,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fnmadd_sd (__m128d a, __m128d b, __m128d c)
- VFNMADDSD xmm, xmm, xmm/m64
-
+ __m128d _mm_fnmadd_sd (__m128d a, __m128d b, __m128d c)VFNMADDSD xmm, xmm, xmm/m64
To be added.
To be added.
@@ -358,10 +329,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fnmadd_ss (__m128 a, __m128 b, __m128 c)
- VFNMADDSS xmm, xmm, xmm/m32
-
+ __m128 _mm_fnmadd_ss (__m128 a, __m128 b, __m128 c)VFNMADDSS xmm, xmm, xmm/m32
To be added.
To be added.
@@ -390,10 +358,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fmadd_sd (__m128d a, __m128d b, __m128d c)
- VFMADDSS xmm, xmm, xmm/m64
-
+ __m128d _mm_fmadd_sd (__m128d a, __m128d b, __m128d c)VFMADDSD xmm, xmm, xmm/m64
To be added.
To be added.
@@ -422,10 +387,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fmadd_ss (__m128 a, __m128 b, __m128 c)
- VFMADDSS xmm, xmm, xmm/m32
-
+ __m128 _mm_fmadd_ss (__m128 a, __m128 b, __m128 c)VFMADDSS xmm, xmm, xmm/m32
To be added.
To be added.
@@ -454,10 +416,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fmaddsub_pd (__m128d a, __m128d b, __m128d c)
- VFMADDSUBPD xmm, xmm, xmm/m128
-
+ __m128d _mm_fmaddsub_pd (__m128d a, __m128d b, __m128d c)VFMADDSUBPD xmm, xmm, xmm/m128
To be added.
To be added.
@@ -486,10 +445,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fmaddsub_ps (__m128 a, __m128 b, __m128 c)
- VFMADDSUBPS xmm, xmm, xmm/m128
-
+ __m128 _mm_fmaddsub_ps (__m128 a, __m128 b, __m128 c)VFMADDSUBPS xmm, xmm, xmm/m128
To be added.
To be added.
@@ -518,10 +474,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_fmaddsub_pd (__m256d a, __m256d b, __m256d c)
- VFMADDSUBPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_fmaddsub_pd (__m256d a, __m256d b, __m256d c)VFMADDSUBPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -550,10 +503,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_fmaddsub_ps (__m256 a, __m256 b, __m256 c)
- VFMADDSUBPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_fmaddsub_ps (__m256 a, __m256 b, __m256 c)VFMADDSUBPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -582,10 +532,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fmsub_pd (__m128d a, __m128d b, __m128d c)
- VFMSUBPS xmm, xmm, xmm/m128
-
+ __m128d _mm_fmsub_pd (__m128d a, __m128d b, __m128d c)VFMSUBPD xmm, xmm, xmm/m128
To be added.
To be added.
@@ -614,10 +561,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fmsub_ps (__m128 a, __m128 b, __m128 c)
- VFMSUBPS xmm, xmm, xmm/m128
-
+ __m128 _mm_fmsub_ps (__m128 a, __m128 b, __m128 c)VFMSUBPS xmm, xmm, xmm/m128
To be added.
To be added.
@@ -646,10 +590,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_fmsub_pd (__m256d a, __m256d b, __m256d c)
- VFMSUBPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_fmsub_pd (__m256d a, __m256d b, __m256d c)VFMSUBPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -678,10 +619,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_fmsub_ps (__m256 a, __m256 b, __m256 c)
- VFMSUBPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_fmsub_ps (__m256 a, __m256 b, __m256 c)VFMSUBPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -710,10 +648,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fmsubadd_pd (__m128d a, __m128d b, __m128d c)
- VFMSUBADDPD xmm, xmm, xmm/m128
-
+ __m128d _mm_fmsubadd_pd (__m128d a, __m128d b, __m128d c)VFMSUBADDPD xmm, xmm, xmm/m128
To be added.
To be added.
@@ -742,10 +677,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fmsubadd_ps (__m128 a, __m128 b, __m128 c)
- VFMSUBADDPS xmm, xmm, xmm/m128
-
+ __m128 _mm_fmsubadd_ps (__m128 a, __m128 b, __m128 c)VFMSUBADDPS xmm, xmm, xmm/m128
To be added.
To be added.
@@ -774,10 +706,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_fmsubadd_pd (__m256d a, __m256d b, __m256d c)
- VFMSUBADDPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_fmsubadd_pd (__m256d a, __m256d b, __m256d c)VFMSUBADDPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -806,10 +735,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_fmsubadd_ps (__m256 a, __m256 b, __m256 c)
- VFMSUBADDPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_fmsubadd_ps (__m256 a, __m256 b, __m256 c)VFMSUBADDPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -838,10 +764,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fnmsub_pd (__m128d a, __m128d b, __m128d c)
- VFNMSUBPD xmm, xmm, xmm/m128
-
+ __m128d _mm_fnmsub_pd (__m128d a, __m128d b, __m128d c)VFNMSUBPD xmm, xmm, xmm/m128
To be added.
To be added.
@@ -870,10 +793,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fnmsub_ps (__m128 a, __m128 b, __m128 c)
- VFNMSUBPS xmm, xmm, xmm/m128
-
+ __m128 _mm_fnmsub_ps (__m128 a, __m128 b, __m128 c)VFNMSUBPS xmm, xmm, xmm/m128
To be added.
To be added.
@@ -902,10 +822,7 @@
To be added.
To be added.
To be added.
-
- __m256d _mm256_fnmsub_pd (__m256d a, __m256d b, __m256d c)
- VFNMSUBPD ymm, ymm, ymm/m256
-
+ __m256d _mm256_fnmsub_pd (__m256d a, __m256d b, __m256d c)VFNMSUBPD ymm, ymm, ymm/m256
To be added.
To be added.
@@ -934,10 +851,7 @@
To be added.
To be added.
To be added.
-
- __m256 _mm256_fnmsub_ps (__m256 a, __m256 b, __m256 c)
- VFNMSUBPS ymm, ymm, ymm/m256
-
+ __m256 _mm256_fnmsub_ps (__m256 a, __m256 b, __m256 c)VFNMSUBPS ymm, ymm, ymm/m256
To be added.
To be added.
@@ -966,10 +880,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fnmsub_sd (__m128d a, __m128d b, __m128d c)
- VFNMSUBSD xmm, xmm, xmm/m64
-
+ __m128d _mm_fnmsub_sd (__m128d a, __m128d b, __m128d c)VFNMSUBSD xmm, xmm, xmm/m64
To be added.
To be added.
@@ -998,10 +909,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fnmsub_ss (__m128 a, __m128 b, __m128 c)
- VFNMSUBSS xmm, xmm, xmm/m32
-
+ __m128 _mm_fnmsub_ss (__m128 a, __m128 b, __m128 c)VFNMSUBSS xmm, xmm, xmm/m32
To be added.
To be added.
@@ -1030,10 +938,7 @@
To be added.
To be added.
To be added.
-
- __m128d _mm_fmsub_sd (__m128d a, __m128d b, __m128d c)
- VFMSUBSD xmm, xmm, xmm/m64
-
+ __m128d _mm_fmsub_sd (__m128d a, __m128d b, __m128d c)VFMSUBSD xmm, xmm, xmm/m64
To be added.
To be added.
@@ -1062,10 +967,7 @@
To be added.
To be added.
To be added.
-
- __m128 _mm_fmsub_ss (__m128 a, __m128 b, __m128 c)
- VFMSUBSS xmm, xmm, xmm/m32
-
+ __m128 _mm_fmsub_ss (__m128 a, __m128 b, __m128 c)VFMSUBSS xmm, xmm, xmm/m32
To be added.
To be added.
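The fused multiply-add entries above compute a*b + c with a single rounding; a typical use is a dot-product accumulation. A C sketch assuming the array length is a multiple of four; the function name is illustrative.

    #include <immintrin.h>  /* compile with -mavx -mfma */
    #include <stddef.h>

    /* Dot product of two double arrays (n assumed to be a multiple of 4),
       accumulated with the _mm256_fmadd_pd intrinsic documented above. */
    static double dot_product(const double *a, const double *b, size_t n)
    {
        __m256d acc = _mm256_setzero_pd();
        for (size_t i = 0; i < n; i += 4) {
            __m256d va = _mm256_loadu_pd(a + i);
            __m256d vb = _mm256_loadu_pd(b + i);
            acc = _mm256_fmadd_pd(va, vb, acc);   /* acc = va*vb + acc */
        }
        double lanes[4];                          /* reduce the four lanes */
        _mm256_storeu_pd(lanes, acc);
        return lanes[0] + lanes[1] + lanes[2] + lanes[3];
    }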
diff --git a/xml/System.Runtime.Intrinsics.X86/Lzcnt+X64.xml b/xml/System.Runtime.Intrinsics.X86/Lzcnt+X64.xml
index a258998c2ed..8d5c8b9971b 100644
--- a/xml/System.Runtime.Intrinsics.X86/Lzcnt+X64.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Lzcnt+X64.xml
@@ -59,13 +59,17 @@
To be added.
-
- unsigned __int64 _lzcnt_u64 (unsigned __int64 a)
- LZCNT reg, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _lzcnt_u64 (unsigned __int64 a)LZCNT reg, reg/m64
To be added.
- To be added.
+
+
+
diff --git a/xml/System.Runtime.Intrinsics.X86/Lzcnt.xml b/xml/System.Runtime.Intrinsics.X86/Lzcnt.xml
index 24c781ffa70..b326281eb7e 100644
--- a/xml/System.Runtime.Intrinsics.X86/Lzcnt.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Lzcnt.xml
@@ -23,9 +23,7 @@
-
- This class provides access to Intel LZCNT hardware instructions via intrinsics
-
+ This class provides access to Intel LZCNT hardware instructions via intrinsics.
To be added.
@@ -78,10 +76,7 @@
To be added.
-
- unsigned int _lzcnt_u32 (unsigned int a)
- LZCNT reg, reg/m32
-
+ unsigned int _lzcnt_u32 (unsigned int a)LZCNT reg, reg/m32
To be added.
To be added.
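Unlike BSR, LZCNT is defined for an input of zero (it returns the operand width), which removes the usual special case when computing a value's bit width. A small C sketch; the helper name is illustrative.

    #include <immintrin.h>  /* compile with -mlzcnt */

    /* Number of bits needed to represent 'v', using the _lzcnt_u32 intrinsic
       documented above; _lzcnt_u32(0) is 32, so bit_width_u32(0) is 0. */
    static unsigned bit_width_u32(unsigned v)
    {
        return 32u - _lzcnt_u32(v);
    }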
diff --git a/xml/System.Runtime.Intrinsics.X86/Pclmulqdq.xml b/xml/System.Runtime.Intrinsics.X86/Pclmulqdq.xml
index 96b3ef35618..604908baba5 100644
--- a/xml/System.Runtime.Intrinsics.X86/Pclmulqdq.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Pclmulqdq.xml
@@ -19,9 +19,7 @@
-
- This class provides access to Intel PCLMULQDQ hardware instructions via intrinsics
-
+ This class provides access to Intel PCLMULQDQ hardware instructions via intrinsics.
To be added.
@@ -49,10 +47,7 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_clmulepi64_si128 (__m128i a, __m128i b, const int imm8)
- PCLMULQDQ xmm, xmm/m128, imm8
-
+ __m128i _mm_clmulepi64_si128 (__m128i a, __m128i b, const int imm8)PCLMULQDQ xmm, xmm/m128, imm8
To be added.
To be added.
@@ -81,10 +76,7 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_clmulepi64_si128 (__m128i a, __m128i b, const int imm8)
- PCLMULQDQ xmm, xmm/m128, imm8
-
+ __m128i _mm_clmulepi64_si128 (__m128i a, __m128i b, const int imm8)PCLMULQDQ xmm, xmm/m128, imm8
To be added.
To be added.
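The PCLMULQDQ entry above performs a carry-less (GF(2)) multiply; the immediate selects which 64-bit half of each operand participates (0x00 picks both low halves). A C sketch; the wrapper name is illustrative, and _mm_cvtsi64_si128 is used only to load the scalars, so this form assumes a 64-bit process.

    #include <immintrin.h>  /* compile with -mpclmul -msse2 */

    /* Carry-less multiply of two 64-bit polynomials with the
       _mm_clmulepi64_si128 intrinsic documented above; the 128-bit product
       is returned in an XMM register. */
    static __m128i clmul_64x64(unsigned long long a, unsigned long long b)
    {
        __m128i va = _mm_cvtsi64_si128((long long)a);
        __m128i vb = _mm_cvtsi64_si128((long long)b);
        return _mm_clmulepi64_si128(va, vb, 0x00);  /* low half of va * low half of vb */
    }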
diff --git a/xml/System.Runtime.Intrinsics.X86/Popcnt+X64.xml b/xml/System.Runtime.Intrinsics.X86/Popcnt+X64.xml
index 268f4b48860..a1bc3c8bcbf 100644
--- a/xml/System.Runtime.Intrinsics.X86/Popcnt+X64.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Popcnt+X64.xml
@@ -59,13 +59,17 @@
To be added.
-
- __int64 _mm_popcnt_u64 (unsigned __int64 a)
- POPCNT reg64, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_popcnt_u64 (unsigned __int64 a)POPCNT reg64, reg/m64
To be added.
- To be added.
+
+
+
diff --git a/xml/System.Runtime.Intrinsics.X86/Popcnt.xml b/xml/System.Runtime.Intrinsics.X86/Popcnt.xml
index 7a44febdd4f..8e3f715da68 100644
--- a/xml/System.Runtime.Intrinsics.X86/Popcnt.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Popcnt.xml
@@ -24,9 +24,7 @@
-
- This class provides access to Intel POPCNT hardware instructions via intrinsics
-
+ This class provides access to Intel POPCNT hardware instructions via intrinsics.
To be added.
@@ -75,10 +73,7 @@
To be added.
-
- int _mm_popcnt_u32 (unsigned int a)
- POPCNT reg, reg/m32
-
+ int _mm_popcnt_u32 (unsigned int a)POPCNT reg, reg/m32
To be added.
To be added.
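A small C sketch of the POPCNT entries above: XOR two words and count the surviving bits to get their Hamming distance. The helper name is illustrative; on 64-bit processes the same pattern works with _mm_popcnt_u64.

    #include <immintrin.h>  /* compile with -mpopcnt */

    /* Hamming distance between two 32-bit values using the _mm_popcnt_u32
       intrinsic documented above. */
    static int hamming_distance(unsigned a, unsigned b)
    {
        return _mm_popcnt_u32(a ^ b);
    }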
diff --git a/xml/System.Runtime.Intrinsics.X86/Sse+X64.xml b/xml/System.Runtime.Intrinsics.X86/Sse+X64.xml
index 19354d85300..8da69abd796 100644
--- a/xml/System.Runtime.Intrinsics.X86/Sse+X64.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Sse+X64.xml
@@ -40,13 +40,17 @@
To be added.
To be added.
-
- __m128 _mm_cvtsi64_ss (__m128 a, __int64 b)
- CVTSI2SS xmm, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ __m128 _mm_cvtsi64_ss (__m128 a, __int64 b)CVTSI2SS xmm, reg/m64
To be added.
- To be added.
+
+
+
@@ -69,13 +73,17 @@
To be added.
-
- __int64 _mm_cvtss_si64 (__m128 a)
- CVTSS2SI r64, xmm/m32
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_cvtss_si64 (__m128 a)CVTSS2SI r64, xmm/m32
To be added.
- To be added.
+
+
+
@@ -98,13 +106,17 @@
To be added.
-
- __int64 _mm_cvttss_si64 (__m128 a)
- CVTTSS2SI r64, xmm/m32
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_cvttss_si64 (__m128 a)CVTTSS2SI r64, xmm/m32
To be added.
- To be added.
+
+
+
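The two 64-bit conversions above differ only in rounding: CVTSS2SI rounds according to MXCSR (round-to-nearest by default), while CVTTSS2SI truncates toward zero. A C sketch; the helper name is illustrative and the example values assume the default rounding mode.

    #include <immintrin.h>  /* compile with -msse; 64-bit process only */

    /* Convert the lowest float lane to a 64-bit integer, once rounded and
       once truncated, using the intrinsics documented above. */
    static void float_to_int64(float f, long long *rounded, long long *truncated)
    {
        __m128 v = _mm_set_ss(f);
        *rounded   = _mm_cvtss_si64(v);   /* e.g. 2.7f -> 3 under round-to-nearest */
        *truncated = _mm_cvttss_si64(v);  /* e.g. 2.7f -> 2 */
    }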
diff --git a/xml/System.Runtime.Intrinsics.X86/Sse2+X64.xml b/xml/System.Runtime.Intrinsics.X86/Sse2+X64.xml
index b3fd512d66c..e5b00ad82ab 100644
--- a/xml/System.Runtime.Intrinsics.X86/Sse2+X64.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Sse2+X64.xml
@@ -40,13 +40,17 @@
To be added.
To be added.
-
- __m128d _mm_cvtsi64_sd (__m128d a, __int64 b)
- CVTSI2SD xmm, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ __m128d _mm_cvtsi64_sd (__m128d a, __int64 b)CVTSI2SD xmm, reg/m64
To be added.
- To be added.
+
+
+
@@ -69,13 +73,17 @@
To be added.
-
- __m128i _mm_cvtsi64_si128 (__int64 a)
- MOVQ xmm, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ __m128i _mm_cvtsi64_si128 (__int64 a)MOVQ xmm, reg/m64
To be added.
- To be added.
+
+
+
@@ -98,13 +106,17 @@
To be added.
-
- __m128i _mm_cvtsi64_si128 (__int64 a)
- MOVQ xmm, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ __m128i _mm_cvtsi64_si128 (__int64 a)MOVQ xmm, reg/m64
To be added.
- To be added.
+
+
+
@@ -127,13 +139,17 @@
To be added.
-
- __int64 _mm_cvtsd_si64 (__m128d a)
- CVTSD2SI r64, xmm/m64
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_cvtsd_si64 (__m128d a)CVTSD2SI r64, xmm/m64
To be added.
- To be added.
+
+
+
@@ -156,13 +172,17 @@
To be added.
-
- __int64 _mm_cvtsi128_si64 (__m128i a)
- MOVQ reg/m64, xmm
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_cvtsi128_si64 (__m128i a)MOVQ reg/m64, xmm
To be added.
- To be added.
+
+
+
@@ -185,13 +205,17 @@
To be added.
-
- __int64 _mm_cvttsd_si64 (__m128d a)
- CVTTSD2SI reg, xmm/m64
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_cvttsd_si64 (__m128d a)CVTTSD2SI reg, xmm/m64
To be added.
- To be added.
+
+
+
@@ -214,13 +238,17 @@
To be added.
-
- __int64 _mm_cvtsi128_si64 (__m128i a)
- MOVQ reg/m64, xmm
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_cvtsi128_si64 (__m128i a)MOVQ reg/m64, xmm
To be added.
- To be added.
+
+
+
@@ -265,12 +293,17 @@
To be added.
To be added.
-
- void _mm_stream_si64(__int64 *p, __int64 a)
- MOVNTI m64, r64
- This intrinisc is only available on 64-bit processes
-
- To be added.
+ void _mm_stream_si64(__int64 *p, __int64 a)MOVNTI m64, r64
+ To be added.
+
+
+
@@ -294,12 +327,17 @@
To be added.
To be added.
-
- void _mm_stream_si64(__int64 *p, __int64 a)
- MOVNTI m64, r64
- This intrinisc is only available on 64-bit processes
-
- To be added.
+ void _mm_stream_si64(__int64 *p, __int64 a)MOVNTI m64, r64
+ To be added.
+
+
+
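A C sketch tying together the 64-bit SSE2 entries above: move a 64-bit integer into and out of an XMM register (MOVQ both ways), then write it with the non-temporal MOVNTI store. The helper name is illustrative, and the trailing SFENCE reflects the usual practice of ordering streaming stores before the data is consumed.

    #include <immintrin.h>  /* compile with -msse2; 64-bit process only */

    /* Round-trip a value through an XMM register and store it non-temporally,
       using the _mm_cvtsi64_si128, _mm_cvtsi128_si64 and _mm_stream_si64
       intrinsics documented above. */
    static void store_nontemporal_i64(long long value, long long *out)
    {
        __m128i v = _mm_cvtsi64_si128(value);   /* MOVQ xmm, reg/m64 */
        long long back = _mm_cvtsi128_si64(v);  /* MOVQ reg/m64, xmm */
        _mm_stream_si64(out, back);             /* MOVNTI m64, r64 (bypasses the cache) */
        _mm_sfence();                           /* order the streaming store */
    }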
diff --git a/xml/System.Runtime.Intrinsics.X86/Sse3.xml b/xml/System.Runtime.Intrinsics.X86/Sse3.xml
index 60f99a17355..5f64b10462f 100644
--- a/xml/System.Runtime.Intrinsics.X86/Sse3.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Sse3.xml
@@ -24,9 +24,7 @@
-
- This class provides access to Intel SSE3 hardware instructions via intrinsics
-
+ This class provides access to Intel SSE3 hardware instructions via intrinsics.
To be added.
@@ -56,10 +54,7 @@
To be added.
To be added.
-
- __m128d _mm_addsub_pd (__m128d a, __m128d b)
- ADDSUBPD xmm, xmm/m128
-
+ __m128d _mm_addsub_pd (__m128d a, __m128d b)ADDSUBPD xmm, xmm/m128
To be added.
To be added.
@@ -90,10 +85,7 @@
To be added.
To be added.
-
- __m128 _mm_addsub_ps (__m128 a, __m128 b)
- ADDSUBPS xmm, xmm/m128
-
+ __m128 _mm_addsub_ps (__m128 a, __m128 b)ADDSUBPS xmm, xmm/m128
To be added.
To be added.
@@ -124,10 +116,7 @@
To be added.
To be added.
-
- __m128d _mm_hadd_pd (__m128d a, __m128d b)
- HADDPD xmm, xmm/m128
-
+ __m128d _mm_hadd_pd (__m128d a, __m128d b)HADDPD xmm, xmm/m128
To be added.
To be added.
@@ -158,10 +147,7 @@
To be added.
To be added.
-
- __m128 _mm_hadd_ps (__m128 a, __m128 b)
- HADDPS xmm, xmm/m128
-
+ __m128 _mm_hadd_ps (__m128 a, __m128 b)HADDPS xmm, xmm/m128
To be added.
To be added.
@@ -192,10 +178,7 @@
To be added.
To be added.
-
- __m128d _mm_hsub_pd (__m128d a, __m128d b)
- HSUBPD xmm, xmm/m128
-
+ __m128d _mm_hsub_pd (__m128d a, __m128d b)HSUBPD xmm, xmm/m128
To be added.
To be added.
@@ -226,10 +209,7 @@
To be added.
To be added.
-
- __m128 _mm_hsub_ps (__m128 a, __m128 b)
- HSUBPS xmm, xmm/m128
-
+ __m128 _mm_hsub_ps (__m128 a, __m128 b)HSUBPS xmm, xmm/m128
To be added.
To be added.
@@ -282,10 +262,7 @@
To be added.
-
- __m128d _mm_loaddup_pd (double const* mem_addr)
- MOVDDUP xmm, m64
-
+ __m128d _mm_loaddup_pd (double const* mem_addr)MOVDDUP xmm, m64
To be added.
To be added.
@@ -425,10 +402,7 @@
To be added.
-
- __m128i _mm_lddqu_si128 (__m128i const* mem_addr)
- LDDQU xmm, m128
-
+ __m128i _mm_lddqu_si128 (__m128i const* mem_addr)LDDQU xmm, m128
To be added.
To be added.
@@ -541,10 +515,7 @@
To be added.
-
- __m128d _mm_movedup_pd (__m128d a)
- MOVDDUP xmm, xmm/m64
-
+ __m128d _mm_movedup_pd (__m128d a)MOVDDUP xmm, xmm/m64
To be added.
To be added.
@@ -573,10 +544,7 @@
To be added.
-
- __m128 _mm_movehdup_ps (__m128 a)
- MOVSHDUP xmm, xmm/m128
-
+ __m128 _mm_movehdup_ps (__m128 a)MOVSHDUP xmm, xmm/m128
To be added.
To be added.
@@ -605,10 +573,7 @@
To be added.
-
- __m128 _mm_moveldup_ps (__m128 a)
- MOVSLDUP xmm, xmm/m128
-
+ __m128 _mm_moveldup_ps (__m128 a)MOVSLDUP xmm, xmm/m128
To be added.
To be added.
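The horizontal-add entries above pair adjacent lanes, so two HADDPS passes reduce a whole vector. A C sketch; the helper name is illustrative.

    #include <immintrin.h>  /* compile with -msse3 */

    /* Sum the four float lanes of 'v' with the _mm_hadd_ps intrinsic
       documented above. */
    static float horizontal_sum(__m128 v)
    {
        __m128 t = _mm_hadd_ps(v, v);  /* (v0+v1, v2+v3, v0+v1, v2+v3) */
        t = _mm_hadd_ps(t, t);         /* the total ends up in every lane */
        return _mm_cvtss_f32(t);
    }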
diff --git a/xml/System.Runtime.Intrinsics.X86/Sse41+X64.xml b/xml/System.Runtime.Intrinsics.X86/Sse41+X64.xml
index d5b7acb62d1..44d7ebc15ac 100644
--- a/xml/System.Runtime.Intrinsics.X86/Sse41+X64.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Sse41+X64.xml
@@ -40,13 +40,17 @@
To be added.
To be added.
-
- __int64 _mm_extract_epi64 (__m128i a, const int imm8)
- PEXTRQ reg/m64, xmm, imm8
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_extract_epi64 (__m128i a, const int imm8)PEXTRQ reg/m64, xmm, imm8
To be added.
- To be added.
+
+
+
@@ -71,13 +75,17 @@
To be added.
To be added.
-
- __int64 _mm_extract_epi64 (__m128i a, const int imm8)
- PEXTRQ reg/m64, xmm, imm8
- This intrinisc is only available on 64-bit processes
-
+ __int64 _mm_extract_epi64 (__m128i a, const int imm8)PEXTRQ reg/m64, xmm, imm8
To be added.
- To be added.
+
+
+
@@ -104,13 +112,17 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_insert_epi64 (__m128i a, __int64 i, const int imm8)
- PINSRQ xmm, reg/m64, imm8
- This intrinisc is only available on 64-bit processes
-
+ __m128i _mm_insert_epi64 (__m128i a, __int64 i, const int imm8)PINSRQ xmm, reg/m64, imm8
To be added.
- To be added.
+
+
+
@@ -137,13 +149,17 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_insert_epi64 (__m128i a, __int64 i, const int imm8)
- PINSRQ xmm, reg/m64, imm8
- This intrinisc is only available on 64-bit processes
-
+ __m128i _mm_insert_epi64 (__m128i a, __int64 i, const int imm8)PINSRQ xmm, reg/m64, imm8
To be added.
- To be added.
+
+
+
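The PINSRQ/PEXTRQ entries above take the lane index as an immediate, so it must be a compile-time constant. A C sketch packing two 64-bit integers into one XMM register and reading them back; the helper name is illustrative.

    #include <immintrin.h>  /* compile with -msse4.1; 64-bit process only */

    /* Build a __m128i from two 64-bit integers with _mm_insert_epi64 and read
       the lanes back with _mm_extract_epi64, as documented above. */
    static void pack_and_unpack(long long lo, long long hi)
    {
        __m128i v = _mm_cvtsi64_si128(lo);       /* lane 0 = lo */
        v = _mm_insert_epi64(v, hi, 1);          /* lane 1 = hi */
        long long lo2 = _mm_extract_epi64(v, 0); /* == lo */
        long long hi2 = _mm_extract_epi64(v, 1); /* == hi */
        (void)lo2; (void)hi2;
    }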
diff --git a/xml/System.Runtime.Intrinsics.X86/Sse42+X64.xml b/xml/System.Runtime.Intrinsics.X86/Sse42+X64.xml
index e816fa57e0f..e73f0edbf34 100644
--- a/xml/System.Runtime.Intrinsics.X86/Sse42+X64.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Sse42+X64.xml
@@ -40,13 +40,17 @@
To be added.
To be added.
-
- unsigned __int64 _mm_crc32_u64 (unsigned __int64 crc, unsigned __int64 v)
- CRC32 reg, reg/m64
- This intrinisc is only available on 64-bit processes
-
+ unsigned __int64 _mm_crc32_u64 (unsigned __int64 crc, unsigned __int64 v)CRC32 reg, reg/m64
To be added.
- To be added.
+
+
+
diff --git a/xml/System.Runtime.Intrinsics.X86/Sse42.xml b/xml/System.Runtime.Intrinsics.X86/Sse42.xml
index bd61436151b..459a4d885eb 100644
--- a/xml/System.Runtime.Intrinsics.X86/Sse42.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Sse42.xml
@@ -24,9 +24,7 @@
-
- This class provides access to Intel SSE4.2 hardware instructions via intrinsics
-
+ This class provides access to Intel SSE4.2 hardware instructions via intrinsics.
To be added.
@@ -56,10 +54,7 @@
To be added.
To be added.
-
- __m128i _mm_cmpgt_epi64 (__m128i a, __m128i b)
- PCMPGTQ xmm, xmm/m128
-
+ __m128i _mm_cmpgt_epi64 (__m128i a, __m128i b)PCMPGTQ xmm, xmm/m128
To be added.
To be added.
@@ -90,10 +85,7 @@
To be added.
To be added.
-
- unsigned int _mm_crc32_u8 (unsigned int crc, unsigned char v)
- CRC32 reg, reg/m8
-
+ unsigned int _mm_crc32_u8 (unsigned int crc, unsigned char v)CRC32 reg, reg/m8
To be added.
To be added.
@@ -124,10 +116,7 @@
To be added.
To be added.
-
- unsigned int _mm_crc32_u16 (unsigned int crc, unsigned short v)
- CRC32 reg, reg/m16
-
+ unsigned int _mm_crc32_u16 (unsigned int crc, unsigned short v)CRC32 reg, reg/m16
To be added.
To be added.
@@ -158,10 +147,7 @@
To be added.
To be added.
-
- unsigned int _mm_crc32_u32 (unsigned int crc, unsigned int v)
- CRC32 reg, reg/m32
-
+ unsigned int _mm_crc32_u32 (unsigned int crc, unsigned int v)CRC32 reg, reg/m32
To be added.
To be added.
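The CRC32 entries above fold one input unit at a time into a running CRC-32C value. A C sketch over a byte buffer; the ~0u seed and final bit-flip follow the usual CRC-32C convention rather than being mandated by the instruction, and wider chunks can use _mm_crc32_u32 or, on 64-bit processes, _mm_crc32_u64.

    #include <immintrin.h>  /* compile with -msse4.2 */
    #include <stddef.h>

    /* CRC-32C of a byte buffer using the _mm_crc32_u8 intrinsic documented
       above. */
    static unsigned crc32c(const unsigned char *data, size_t len)
    {
        unsigned crc = 0xFFFFFFFFu;
        for (size_t i = 0; i < len; i++)
            crc = _mm_crc32_u8(crc, data[i]);
        return ~crc;
    }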
diff --git a/xml/System.Runtime.Intrinsics.X86/Ssse3.xml b/xml/System.Runtime.Intrinsics.X86/Ssse3.xml
index e5b1a5f9868..a307e04c04c 100644
--- a/xml/System.Runtime.Intrinsics.X86/Ssse3.xml
+++ b/xml/System.Runtime.Intrinsics.X86/Ssse3.xml
@@ -24,9 +24,7 @@
-
- This class provides access to Intel SSSE3 hardware instructions via intrinsics
-
+ This class provides access to Intel SSSE3 hardware instructions via intrinsics.
To be added.
@@ -54,10 +52,7 @@
To be added.
-
- __m128i _mm_abs_epi16 (__m128i a)
- PABSW xmm, xmm/m128
-
+ __m128i _mm_abs_epi16 (__m128i a)PABSW xmm, xmm/m128
To be added.
To be added.
@@ -86,10 +81,7 @@
To be added.
-
- __m128i _mm_abs_epi32 (__m128i a)
- PABSD xmm, xmm/m128
-
+ __m128i _mm_abs_epi32 (__m128i a)PABSD xmm, xmm/m128
To be added.
To be added.
@@ -118,10 +110,7 @@
To be added.
-
- __m128i _mm_abs_epi8 (__m128i a)
- PABSB xmm, xmm/m128
-
+ __m128i _mm_abs_epi8 (__m128i a)PABSB xmm, xmm/m128
To be added.
To be added.
@@ -150,13 +139,17 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)
- PALIGNR xmm, xmm/m128, imm8
- This intrinsic generates PALIGNR that operates over bytes rather than elements of the vectors.
-
+ __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)PALIGNR xmm, xmm/m128, imm8
To be added.
- To be added.
+
+
+
@@ -183,13 +176,17 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)
- PALIGNR xmm, xmm/m128, imm8
- This intrinsic generates PALIGNR that operates over bytes rather than elements of the vectors.
-
+ __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)PALIGNR xmm, xmm/m128, imm8
To be added.
- To be added.
+
+
+
@@ -216,13 +213,17 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)
- PALIGNR xmm, xmm/m128, imm8
- This intrinsic generates PALIGNR that operates over bytes rather than elements of the vectors.
-
+ __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)PALIGNR xmm, xmm/m128, imm8
To be added.
- To be added.
+
+
+
@@ -249,13 +250,17 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)
- PALIGNR xmm, xmm/m128, imm8
- This intrinsic generates PALIGNR that operates over bytes rather than elements of the vectors.
-
+ __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)PALIGNR xmm, xmm/m128, imm8
To be added.
- To be added.
+
+
+
@@ -286,10 +291,7 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)
- PALIGNR xmm, xmm/m128, imm8
-
+ __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)PALIGNR xmm, xmm/m128, imm8
To be added.
To be added.
@@ -318,13 +320,17 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)
- PALIGNR xmm, xmm/m128, imm8
- This intrinsic generates PALIGNR that operates over bytes rather than elements of the vectors.
-
+ __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)PALIGNR xmm, xmm/m128, imm8
To be added.
- To be added.
+
+
+
@@ -351,13 +357,17 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)
- PALIGNR xmm, xmm/m128, imm8
- This intrinsic generates PALIGNR that operates over bytes rather than elements of the vectors.
-
+ __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)PALIGNR xmm, xmm/m128, imm8
To be added.
- To be added.
+
+
+
@@ -384,13 +394,17 @@
To be added.
To be added.
To be added.
-
- __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)
- PALIGNR xmm, xmm/m128, imm8
- This intrinsic generates PALIGNR that operates over bytes rather than elements of the vectors.
-
+ __m128i _mm_alignr_epi8 (__m128i a, __m128i b, int count)PALIGNR xmm, xmm/m128, imm8
To be added.
- To be added.
+
+
+
@@ -419,10 +433,7 @@
To be added.
To be added.
-
- __m128i _mm_hadd_epi16 (__m128i a, __m128i b)
- PHADDW xmm, xmm/m128
-
+ __m128i _mm_hadd_epi16 (__m128i a, __m128i b)PHADDW xmm, xmm/m128
To be added.
To be added.
@@ -453,10 +464,7 @@
To be added.
To be added.
-
- __m128i _mm_hadd_epi32 (__m128i a, __m128i b)
- PHADDD xmm, xmm/m128
-
+ __m128i _mm_hadd_epi32 (__m128i a, __m128i b)PHADDD xmm, xmm/m128
To be added.
To be added.
@@ -487,10 +495,7 @@
To be added.
To be added.
-
- __m128i _mm_hadds_epi16 (__m128i a, __m128i b)
- PHADDSW xmm, xmm/m128
-
+ __m128i _mm_hadds_epi16 (__m128i a, __m128i b)PHADDSW xmm, xmm/m128
To be added.
To be added.
@@ -521,10 +526,7 @@
To be added.
To be added.
-
- __m128i _mm_hsub_epi16 (__m128i a, __m128i b)
- PHSUBW xmm, xmm/m128
-
+ __m128i _mm_hsub_epi16 (__m128i a, __m128i b)PHSUBW xmm, xmm/m128
To be added.
To be added.
@@ -555,10 +557,7 @@
To be added.
To be added.
-
- __m128i _mm_hsub_epi32 (__m128i a, __m128i b)
- PHSUBD xmm, xmm/m128
-
+ __m128i _mm_hsub_epi32 (__m128i a, __m128i b)PHSUBD xmm, xmm/m128
To be added.
To be added.
@@ -589,10 +588,7 @@
To be added.
To be added.
-
- __m128i _mm_hsubs_epi16 (__m128i a, __m128i b)
- PHSUBSW xmm, xmm/m128
-
+ __m128i _mm_hsubs_epi16 (__m128i a, __m128i b)PHSUBSW xmm, xmm/m128
To be added.
To be added.
@@ -648,10 +644,7 @@
To be added.
To be added.
-
- __m128i _mm_maddubs_epi16 (__m128i a, __m128i b)
- PMADDUBSW xmm, xmm/m128
-
+ __m128i _mm_maddubs_epi16 (__m128i a, __m128i b)PMADDUBSW xmm, xmm/m128
To be added.
To be added.
@@ -682,10 +675,7 @@
To be added.
To be added.
-
- __m128i _mm_mulhrs_epi16 (__m128i a, __m128i b)
- PMULHRSW xmm, xmm/m128
-
+ __m128i _mm_mulhrs_epi16 (__m128i a, __m128i b)PMULHRSW xmm, xmm/m128
To be added.
To be added.
@@ -712,10 +702,7 @@
To be added.
To be added.
-
- __m128i _mm_shuffle_epi8 (__m128i a, __m128i b)
- PSHUFB xmm, xmm/m128
-
+ __m128i _mm_shuffle_epi8 (__m128i a, __m128i b)PSHUFB xmm, xmm/m128
To be added.
To be added.
@@ -746,10 +733,7 @@
To be added.
To be added.
-
- __m128i _mm_shuffle_epi8 (__m128i a, __m128i b)
- PSHUFB xmm, xmm/m128
-
+ __m128i _mm_shuffle_epi8 (__m128i a, __m128i b)PSHUFB xmm, xmm/m128
To be added.
To be added.
@@ -780,10 +764,7 @@
To be added.
To be added.
-
- __m128i _mm_sign_epi16 (__m128i a, __m128i b)
- PSIGNW xmm, xmm/m128
-
+ __m128i _mm_sign_epi16 (__m128i a, __m128i b)PSIGNW xmm, xmm/m128
To be added.
To be added.
@@ -814,10 +795,7 @@
To be added.
To be added.
-
- __m128i _mm_sign_epi32 (__m128i a, __m128i b)
- PSIGND xmm, xmm/m128
-
+ __m128i _mm_sign_epi32 (__m128i a, __m128i b)PSIGND xmm, xmm/m128
To be added.
To be added.
@@ -848,10 +826,7 @@
To be added.
To be added.
-
- __m128i _mm_sign_epi8 (__m128i a, __m128i b)
- PSIGNB xmm, xmm/m128
-
+ __m128i _mm_sign_epi8 (__m128i a, __m128i b)PSIGNB xmm, xmm/m128
To be added.
To be added.
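PSHUFB (documented above) treats each byte of the control vector as a source index, with the high bit zeroing the lane; byte reversal is a common use. A C sketch; the helper name and control constant are illustrative.

    #include <immintrin.h>  /* compile with -mssse3 */

    /* Reverse the 16 bytes of 'v' with the _mm_shuffle_epi8 intrinsic
       documented above. */
    static __m128i reverse_bytes(__m128i v)
    {
        const __m128i rev = _mm_setr_epi8(15, 14, 13, 12, 11, 10, 9, 8,
                                           7,  6,  5,  4,  3,  2, 1, 0);
        return _mm_shuffle_epi8(v, rev);
    }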