x64 (amd64) (Lista de intrínsecos)

 

Para obtener la documentación más reciente de Visual Studio 2017 RC, consulte Documentación de Visual Studio 2017 RC.

En este documento se enumeran las funciones intrínsecas que el compilador de Visual C++ admite cuando el destino es x64 (también denominado amd64).

Para obtener información sobre funciones intrínsecas individuales, vea estos recursos según corresponda para el procesador de destino:

La tabla siguiente enumera los intrínsecos disponibles en procesadores x64. La columna Tecnología muestra la compatibilidad requerida con el conjunto de instrucciones. Use el intrínseco __cpuid para determinar la compatibilidad con el conjunto de instrucciones en tiempo de ejecución. Si hay dos entradas en una fila, representan puntos de entrada diferentes para el mismo intrínseco. [1] indica que el intrínseco solo está disponible en procesadores AMD. [2] indica que el intrínseco solo está disponible en procesadores Intel. [3] indica que el prototipo es una macro. El encabezado necesario para el prototipo de función aparece en la columna Header. El encabezado intrin.h incluye immintrin.h y ammintrin.h por motivos de simplicidad.

Nombre de función intrínsecaTecnologíaHeaderPrototipo de función
_addcarry_u16intrin.hunsigned char _addcarry_u16(unsigned char c_in,unsigned short src1,unsigned short src2,unsigned short *sum)
_addcarry_u32intrin.hunsigned char _addcarry_u32(unsigned char c_in,unsigned int src1,unsigned int src2,unsigned int *sum)
_addcarry_u64intrin.hunsigned char _addcarry_u64(unsigned char c_in,unsigned __int64 src1,unsigned __int64 src2,unsigned __int64 *sum)
_addcarry_u8intrin.hunsigned char _addcarry_u8(unsigned char c_in,unsigned char src1,unsigned char src2,unsigned char *sum)
_addcarryx_u32ADX [2]immintrin.hunsigned char _addcarryx_u32(unsigned char c_in,unsigned int src1,unsigned int src2,unsigned int *sum)
_addcarryx_u64ADX [2]immintrin.hunsigned char _addcarryx_u64(unsigned char c_in,unsigned __int64 src1,unsigned __int64 src2,unsigned __int64 *sum)
__addgsbyteintrin.hvoid __addgsbyte(unsigned long,unsigned char)
__addgsdwordintrin.hvoid __addgsdword(unsigned long,unsigned int)
__addgsqwordintrin.hvoid __addgsqword(unsigned long,unsigned __int64)
__addgswordintrin.hvoid __addgsword(unsigned long,unsigned short)
_AddressOfReturnAddressintrin.hvoid * _AddressOfReturnAddress(void)
_andn_u32BMI [1]ammintrin.hunsigned int _andn_u32(unsigned int,unsigned int)
_andn_u64BMI [1]ammintrin.hunsigned __int64 _andn_u64(unsigned __int64,unsigned __int64)
_bextr_u32BMIammintrin.h, immintrin.hunsigned int _bextr_u32(unsigned int,unsigned int,unsigned int)
_bextr_u64BMIammintrin.h, immintrin.hunsigned __int64 _bextr_u64(unsigned __int64,unsigned int,unsigned int)
_bextri_u32ABM [1]ammintrin.hunsigned int _bextri_u32(unsigned int,unsigned int)
_bextri_u64ABM [1]ammintrin.hunsigned __int64 _bextri_u64(unsigned __int64,unsigned int)
_BitScanForwardintrin.hBOOLEAN _BitScanForward(OUT ULONG* Index,IN ULONG Mask)
_BitScanForward64intrin.hBOOLEAN _BitScanForward64(OUT ULONG* Index,IN ULONG64 Mask)
_BitScanReverseintrin.hBOOLEAN _BitScanReverse(OUT ULONG* Index,IN ULONG Mask)
_BitScanReverse64intrin.hBOOLEAN _BitScanReverse64(OUT ULONG* Index,IN ULONG64 Mask)
_bittestintrin.hunsigned char _bittest(long const *a,long b)
_bittest64intrin.hunsigned char _bittest64(__int64 const *a,__int64 b)
_bittestandcomplementintrin.hunsigned char _bittestandcomplement(long *a,long b)
_bittestandcomplement64intrin.hunsigned char _bittestandcomplement64(__int64 *a,__int64 b)
_bittestandresetintrin.hunsigned char _bittestandreset(long *a,long b)
_bittestandreset64intrin.hunsigned char _bittestandreset64(__int64 *a,__int64 b)
_bittestandsetintrin.hunsigned char _bittestandset(long *a,long b)
_bittestandset64intrin.hunsigned char _bittestandset64(__int64 *a,__int64 b)
_blcfill_u32ABM [1]ammintrin.hunsigned int _blcfill_u32(unsigned int)
_blcfill_u64ABM [1]ammintrin.hunsigned __int64 _blcfill_u64(unsigned __int64)
_blci_u32ABM [1]ammintrin.hunsigned int _blci_u32(unsigned int)
_blci_u64ABM [1]ammintrin.hunsigned __int64 _blci_u64(unsigned __int64)
_blcic_u32ABM [1]ammintrin.hunsigned int _blcic_u32(unsigned int)
_blcic_u64ABM [1]ammintrin.hunsigned __int64 _blcic_u64(unsigned __int64)
_blcmsk_u32ABM [1]ammintrin.hunsigned int _blcmsk_u32(unsigned int)
_blcmsk_u64ABM [1]ammintrin.hunsigned __int64 _blcmsk_u64(unsigned __int64)
_blcs_u32ABM [1]ammintrin.hunsigned int _blcs_u32(unsigned int)
_blcs_u64ABM [1]ammintrin.hunsigned __int64 _blcs_u64(unsigned __int64)
_blsfill_u32ABM [1]ammintrin.hunsigned int _blsfill_u32(unsigned int)
_blsfill_u64ABM [1]ammintrin.hunsigned __int64 _blsfill_u64(unsigned __int64)
_blsi_u32BMIammintrin.h, immintrin.hunsigned int _blsi_u32(unsigned int)
_blsi_u64BMIammintrin.h, immintrin.hunsigned __int64 _blsi_u64(unsigned __int64)
_blsic_u32ABM [1]ammintrin.hunsigned int _blsic_u32(unsigned int)
_blsic_u64ABM [1]ammintrin.hunsigned __int64 _blsic_u64(unsigned __int64)
_blsmsk_u32BMIammintrin.h, immintrin.hunsigned int _blsmsk_u32(unsigned int)
_blsmsk_u64BMIammintrin.h, immintrin.hunsigned __int64 _blsmsk_u64(unsigned __int64)
_blsr_u32BMIammintrin.h, immintrin.hunsigned int _blsr_u32(unsigned int)
_blsr_u64BMIammintrin.h, immintrin.hunsigned __int64 _blsr_u64(unsigned __int64)
_bzhi_u32BMI [2]immintrin.hunsigned int _bzhi_u32(unsigned int,unsigned int)
_bzhi_u64BMI [2]immintrin.hunsigned __int64 _bzhi_u64(unsigned __int64,unsigned int)
_clacSMAPintrin.hvoid _clac(void)
__cpuidintrin.hvoid __cpuid(int *a,int b)
__cpuidexintrin.hvoid __cpuidex(int *a,int b,int c)
__debugbreakintrin.hvoid __debugbreak(void)
_disableintrin.hvoid _disable(void)
__emulintrin.h__int64 [pascal/cdecl] __emul(int,int)
__emuluintrin.hunsigned __int64 [pascal/cdecl] __emulu(unsigned int,unsigned int)
_enableintrin.hvoid _enable(void)
__fastfailintrin.hvoid __fastfail(unsigned int)
__faststorefenceintrin.hvoid __faststorefence(void)
_fxrstorFXSR [2]immintrin.hvoid _fxrstor(void const*)
_fxrstor64FXSR [2]immintrin.hvoid _fxrstor64(void const*)
_fxsaveFXSR [2]immintrin.hvoid _fxsave(void*)
_fxsave64FXSR [2]immintrin.hvoid _fxsave64(void*)
__getcallerseflagsintrin.hunsigned int __getcallerseflags(void)
__haltintrin.hvoid __halt(void)
__inbyteintrin.hunsigned char __inbyte(unsigned short Port)
__inbytestringintrin.hvoid __inbytestring(unsigned short Port,unsigned char *Buffer,unsigned long Count)
__incgsbyteintrin.hvoid __incgsbyte(unsigned long)
__incgsdwordintrin.hvoid __incgsdword(unsigned long)
__incgsqwordintrin.hvoid __incgsqword(unsigned long)
__incgswordintrin.hvoid __incgsword(unsigned long)
__indwordintrin.hunsigned long __indword(unsigned short Port)
__indwordstringintrin.hvoid __indwordstring(unsigned short Port,unsigned long *Buffer,unsigned long Count)
__int2cintrin.hvoid __int2c(void)
_InterlockedAndintrin.hlong _InterlockedAnd(long volatile *, long)
_InterlockedAnd_HLEAcquireHLE [2]immintrin.hlong _InterlockedAnd_HLEAcquire(long volatile *,long)
_InterlockedAnd_HLEReleaseHLE [2]immintrin.hlong _InterlockedAnd_HLERelease(long volatile *,long)
_InterlockedAnd_npintrin.hlong _InterlockedAnd_np(long *,long)
_InterlockedAnd16intrin.hshort _InterlockedAnd16(short volatile *, short)
_InterlockedAnd16_npintrin.hshort _InterlockedAnd16_np(short *,short)
_InterlockedAnd64intrin.h__int64 _InterlockedAnd64(__int64 volatile *, __int64)
_InterlockedAnd64_HLEAcquireHLE [2]immintrin.h__int64 _InterlockedAnd64_HLEAcquire(__int64 volatile *,__int64)
_InterlockedAnd64_HLEReleaseHLE [2]immintrin.h__int64 _InterlockedAnd64_HLERelease(__int64 volatile *,__int64)
_InterlockedAnd64_npintrin.h__int64 _InterlockedAnd64_np(__int64 *,__int64)
_InterlockedAnd8intrin.hchar _InterlockedAnd8(char volatile *, char)
_InterlockedAnd8_npintrin.hchar _InterlockedAnd8_np(char *,char)
_interlockedbittestandresetintrin.hunsigned char _interlockedbittestandreset(long *a,long b)
_interlockedbittestandreset_HLEAcquireHLE [2]immintrin.hunsigned char _interlockedbittestandreset_HLEAcquire(long *a,long b)
_interlockedbittestandreset_HLEReleaseHLE [2]immintrin.hunsigned char _interlockedbittestandreset_HLERelease(long *a,long b)
_interlockedbittestandreset64intrin.hunsigned char _interlockedbittestandreset64(__int64 *a,__int64 b)
_interlockedbittestandreset64_HLEAcquireHLE [2]immintrin.hunsigned char _interlockedbittestandreset64_HLEAcquire(__int64 *a,__int64 b)
_interlockedbittestandreset64_HLEReleaseHLE [2]immintrin.hunsigned char _interlockedbittestandreset64_HLERelease(__int64 *a,__int64 b)
_interlockedbittestandsetintrin.hunsigned char _interlockedbittestandset(long *a,long b)
_interlockedbittestandset_HLEAcquireHLE [2]immintrin.hunsigned char _interlockedbittestandset_HLEAcquire(long *a,long b)
_interlockedbittestandset_HLEReleaseHLE [2]immintrin.hunsigned char _interlockedbittestandset_HLERelease(long *a,long b)
_interlockedbittestandset64intrin.hunsigned char _interlockedbittestandset64(__int64 *a,__int64 b)
_interlockedbittestandset64_HLEAcquireHLE [2]immintrin.hunsigned char _interlockedbittestandset64_HLEAcquire(__int64 *a,__int64 b)
_interlockedbittestandset64_HLEReleaseHLE [2]immintrin.hunsigned char _interlockedbittestandset64_HLERelease(__int64 *a,__int64 b)
_InterlockedCompareExchangeintrin.hlong _InterlockedCompareExchange (long volatile *,long,long)
_InterlockedCompareExchange_HLEAcquireHLE [2]immintrin.hlong _InterlockedCompareExchange_HLEAcquire(long volatile *,long,long)
_InterlockedCompareExchange_HLEReleaseHLE [2]immintrin.hlong _InterlockedCompareExchange_HLERelease(long volatile *,long,long)
_InterlockedCompareExchange_npintrin.hlong _InterlockedCompareExchange_np (long *,long,long)
_InterlockedCompareExchange128intrin.hunsigned char _InterlockedCompareExchange128(__int64 volatile *,__int64,__int64,__int64*)
_InterlockedCompareExchange128_npintrin.hunsigned char _InterlockedCompareExchange128_np(__int64 volatile *,__int64,__int64,__int64*)
_InterlockedCompareExchange16intrin.hshort _InterlockedCompareExchange16(short volatile *Destination,short Exchange,short Comparand)
_InterlockedCompareExchange16_npintrin.hshort _InterlockedCompareExchange16_np(short volatile *Destination,short Exchange,short Comparand)
_InterlockedCompareExchange64intrin.h__int64 _InterlockedCompareExchange64(__int64 volatile *,__int64,__int64)
_InterlockedCompareExchange64_HLEAcquireHLE [2]immintrin.h__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *,__int64,__int64)
_InterlockedCompareExchange64_HLEReleaseHLE [2]immintrin.h__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *,__int64,__int64)
_InterlockedCompareExchange64_npintrin.h__int64 _InterlockedCompareExchange64_np(__int64 *,__int64,__int64)
_InterlockedCompareExchange8intrin.hchar _InterlockedCompareExchange8(char volatile *Destination,char Exchange,char Comparand)
_InterlockedCompareExchangePointerintrin.hvoid * _InterlockedCompareExchangePointer(void * volatile *, void *, void *)
_InterlockedCompareExchangePointer_HLEAcquireHLE [2]immintrin.hvoid *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *,void *,void *)
_InterlockedCompareExchangePointer_HLEReleaseHLE [2]immintrin.hvoid *_InterlockedCompareExchangePointer_HLERelease(void *volatile *,void *,void *)
_InterlockedCompareExchangePointer_npintrin.hvoid *_InterlockedCompareExchangePointer_np(void **,void *,void *)
_InterlockedDecrementintrin.hlong _InterlockedDecrement(long volatile *)
_InterlockedDecrement16intrin.hshort _InterlockedDecrement16(short volatile *Addend)
_InterlockedDecrement64intrin.h__int64 _InterlockedDecrement64(__int64 volatile *)
_InterlockedExchangeintrin.hlong _InterlockedExchange(long volatile *,long)
_InterlockedExchange_HLEAcquireHLE [2]immintrin.hlong _InterlockedExchange_HLEAcquire(long volatile *,long)
_InterlockedExchange_HLEReleaseHLE [2]immintrin.hlong _InterlockedExchange_HLERelease(long volatile *,long)
_InterlockedExchange16intrin.hshort _InterlockedExchange16(short volatile *,short)
_InterlockedExchange64intrin.h__int64 _InterlockedExchange64(__int64 volatile *,__int64)
_InterlockedExchange64_HLEAcquireHLE [2]immintrin.h__int64 _InterlockedExchange64_HLEAcquire(__int64 volatile *,__int64)
_InterlockedExchange64_HLEReleaseHLE [2]immintrin.h__int64 _InterlockedExchange64_HLERelease(__int64 volatile *,__int64)
_InterlockedExchange8intrin.hchar _InterlockedExchange8(char volatile *,char)
_InterlockedExchangeAddintrin.hlong _InterlockedExchangeAdd(long volatile *,long)
_InterlockedExchangeAdd_HLEAcquireHLE [2]immintrin.hlong _InterlockedExchangeAdd_HLEAcquire(long volatile *,long)
_InterlockedExchangeAdd_HLEReleaseHLE [2]immintrin.hlong _InterlockedExchangeAdd_HLERelease(long volatile *,long)
_InterlockedExchangeAdd16intrin.hshort _InterlockedExchangeAdd16(short volatile *, short)
_InterlockedExchangeAdd64intrin.h__int64 _InterlockedExchangeAdd64(__int64 volatile *, __int64)
_InterlockedExchangeAdd64_HLEAcquireHLE [2]immintrin.h__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *,__int64)
_InterlockedExchangeAdd64_HLEReleaseHLE [2]immintrin.h__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *,__int64)
_InterlockedExchangeAdd8intrin.hchar _InterlockedExchangeAdd8(char volatile *, char)
_InterlockedExchangePointerintrin.hvoid * _InterlockedExchangePointer(void *volatile *,void *)
_InterlockedExchangePointer_HLEAcquireHLE [2]immintrin.hvoid * _InterlockedExchangePointer_HLEAcquire(void *volatile *,void *)
_InterlockedExchangePointer_HLEReleaseHLE [2]immintrin.hvoid * _InterlockedExchangePointer_HLERelease(void *volatile *,void *)
_InterlockedIncrementintrin.hlong _InterlockedIncrement(long volatile *)
_InterlockedIncrement16intrin.hshort _InterlockedIncrement16(short volatile *Addend)
_InterlockedIncrement64intrin.h__int64 _InterlockedIncrement64(__int64 volatile *)
_InterlockedOrintrin.hlong _InterlockedOr(long volatile *, long)
_InterlockedOr_HLEAcquireHLE [2]immintrin.hlong _InterlockedOr_HLEAcquire(long volatile *,long)
_InterlockedOr_HLEReleaseHLE [2]immintrin.hlong _InterlockedOr_HLERelease(long volatile *,long)
_InterlockedOr_npintrin.hlong _InterlockedOr_np(long *,long)
_InterlockedOr16intrin.hshort _InterlockedOr16(short volatile *, short)
_InterlockedOr16_npintrin.hshort _InterlockedOr16_np(short *,short)
_InterlockedOr64intrin.h__int64 _InterlockedOr64(__int64 volatile *, __int64)
_InterlockedOr64_HLEAcquireHLE [2]immintrin.h__int64 _InterlockedOr64_HLEAcquire(__int64 volatile *,__int64)
_InterlockedOr64_HLEReleaseHLE [2]immintrin.h__int64 _InterlockedOr64_HLERelease(__int64 volatile *,__int64)
_InterlockedOr64_npintrin.h__int64 _InterlockedOr64_np(__int64 *,__int64)
_InterlockedOr8intrin.hchar _InterlockedOr8(char volatile *, char)
_InterlockedOr8_npintrin.hchar _InterlockedOr8_np(char *,char)
_InterlockedXorintrin.hlong _InterlockedXor(long volatile *, long)
_InterlockedXor_HLEAcquireHLE [2]immintrin.hlong _InterlockedXor_HLEAcquire(long volatile *,long)
_InterlockedXor_HLEReleaseHLE [2]immintrin.hlong _InterlockedXor_HLERelease(long volatile *,long)
_InterlockedXor_npintrin.hlong _InterlockedXor_np(long *,long)
_InterlockedXor16intrin.hshort _InterlockedXor16(short volatile *, short)
_InterlockedXor16_npintrin.hshort _InterlockedXor16_np(short *,short)
_InterlockedXor64intrin.h__int64 _InterlockedXor64(__int64 volatile *, __int64)
_InterlockedXor64_HLEAcquireHLE [2]immintrin.h__int64 _InterlockedXor64_HLEAcquire(__int64 volatile *,__int64)
_InterlockedXor64_HLEReleaseHLE [2]immintrin.h__int64 _InterlockedXor64_HLERelease(__int64 volatile *,__int64)
_InterlockedXor64_npintrin.h__int64 _InterlockedXor64_np(__int64 *,__int64)
_InterlockedXor8intrin.hchar _InterlockedXor8(char volatile *, char)
_InterlockedXor8_npintrin.hchar _InterlockedXor8_np(char *,char)
__invlpgintrin.hvoid __invlpg(void*)
_invpcidINVPCID [2]immintrin.hvoid _invpcid(unsigned int,void *)
__inwordintrin.hunsigned short __inword(unsigned short Port)
__inwordstringintrin.hvoid __inwordstring(unsigned short Port,unsigned short *Buffer,unsigned long Count)
_lgdtintrin.hvoid _lgdt(void*)
__lidtintrin.hvoid __lidt(void*)
__ll_lshiftintrin.hunsigned __int64 [pascal/cdecl] __ll_lshift(unsigned __int64,int)
__ll_rshiftintrin.h__int64 [pascal/cdecl] __ll_rshift(__int64,int)
__llwpcbLWP [1]ammintrin.hvoid __llwpcb(void *)
_load_be_u16

 _loadbe_i16
MOVBEimmintrin.hunsigned short _load_be_u16(void const*);

short _loadbe_i16(void const*); [3]
_load_be_u32

 _loadbe_i32
MOVBEimmintrin.hunsigned int _load_be_u32(void const*);

int _loadbe_i32(void const*); [3]
_load_be_u64

 _loadbe_i64
MOVBEimmintrin.hunsigned __int64 _load_be_u64(void const*);

__int64 _loadbe_i64(void const*); [3]
__lwpins32LWP [1]ammintrin.hunsigned char __lwpins32(unsigned int,unsigned int,unsigned int)
__lwpins64LWP [1]ammintrin.hunsigned char __lwpins64(unsigned __int64,unsigned int,unsigned int)
__lwpval32LWP [1]ammintrin.hvoid __lwpval32(unsigned int,unsigned int,unsigned int)
__lwpval64LWP [1]ammintrin.hvoid __lwpval64(unsigned __int64,unsigned int,unsigned int)
__lzcntLZCNTintrin.hunsigned int __lzcnt(unsigned int)
_lzcnt_u32BMIammintrin.h, immintrin.hunsigned int _lzcnt_u32(unsigned int)
_lzcnt_u64BMIammintrin.h, immintrin.hunsigned __int64 _lzcnt_u64(unsigned __int64)
__lzcnt16LZCNTintrin.hunsigned short __lzcnt16(unsigned short)
__lzcnt64LZCNTintrin.hunsigned __int64 __lzcnt64(unsigned __int64)
_m_prefetch3DNOWintrin.hvoid _m_prefetch(void*)
_m_prefetchw3DNOWintrin.hvoid _m_prefetchw(void*)
_mm_abs_epi16SSSE3intrin.h__m128i _mm_abs_epi16(__m128i)
_mm_abs_epi32SSSE3intrin.h__m128i _mm_abs_epi32(__m128i)
_mm_abs_epi8SSSE3intrin.h__m128i _mm_abs_epi8(__m128i)
_mm_add_epi16SSE2intrin.h__m128i _mm_add_epi16(__m128i,__m128i)
_mm_add_epi32SSE2intrin.h__m128i _mm_add_epi32(__m128i,__m128i)
_mm_add_epi64SSE2intrin.h__m128i _mm_add_epi64(__m128i,__m128i)
_mm_add_epi8SSE2intrin.h__m128i _mm_add_epi8(__m128i,__m128i)
_mm_add_pdSSE2intrin.h__m128d _mm_add_pd(__m128d,__m128d)
_mm_add_psSSEintrin.h__m128 _mm_add_ps(__m128,__m128)
_mm_add_sdSSE2intrin.h__m128d _mm_add_sd(__m128d,__m128d)
_mm_add_ssSSEintrin.h__m128 _mm_add_ss(__m128,__m128)
_mm_adds_epi16SSE2intrin.h__m128i _mm_adds_epi16(__m128i,__m128i)
_mm_adds_epi8SSE2intrin.h__m128i _mm_adds_epi8(__m128i,__m128i)
_mm_adds_epu16SSE2intrin.h__m128i _mm_adds_epu16(__m128i,__m128i)
_mm_adds_epu8SSE2intrin.h__m128i _mm_adds_epu8(__m128i,__m128i)
_mm_addsub_pdSSE3intrin.h__m128d _mm_addsub_pd(__m128d,__m128d)
_mm_addsub_psSSE3intrin.h__m128 _mm_addsub_ps(__m128,__m128)
_mm_aesdec_si128AESNI [2]immintrin.h__m128i _mm_aesdec_si128( __m128i,__m128i )
_mm_aesdeclast_si128AESNI [2]immintrin.h__m128i _mm_aesdeclast_si128( __m128i,__m128i )
_mm_aesenc_si128AESNI [2]immintrin.h__m128i _mm_aesenc_si128( __m128i,__m128i )
_mm_aesenclast_si128AESNI [2]immintrin.h__m128i _mm_aesenclast_si128( __m128i,__m128i )
_mm_aesimc_si128AESNI [2]immintrin.h__m128i _mm_aesimc_si128 (__m128i )
_mm_aeskeygenassist_si128AESNI [2]immintrin.h__m128i _mm_aeskeygenassist_si128 (__m128i,const int )
_mm_alignr_epi8SSSE3intrin.h__m128i _mm_alignr_epi8(__m128i,__m128i,int)
_mm_and_pdSSE2intrin.h__m128d _mm_and_pd(__m128d,__m128d)
_mm_and_psSSEintrin.h__m128 _mm_and_ps(__m128,__m128)
_mm_and_si128SSE2intrin.h__m128i _mm_and_si128(__m128i,__m128i)
_mm_andnot_pdSSE2intrin.h__m128d _mm_andnot_pd(__m128d,__m128d)
_mm_andnot_psSSEintrin.h__m128 _mm_andnot_ps(__m128,__m128)
_mm_andnot_si128SSE2intrin.h__m128i _mm_andnot_si128(__m128i,__m128i)
_mm_avg_epu16SSE2intrin.h__m128i _mm_avg_epu16(__m128i,__m128i)
_mm_avg_epu8SSE2intrin.h__m128i _mm_avg_epu8(__m128i,__m128i)
_mm_blend_epi16SSE41intrin.h__m128i _mm_blend_epi16 (__m128i,__m128i,const int )
_mm_blend_epi32AVX2 [2]immintrin.h__m128i _mm_blend_epi32(__m128i,__m128i,const int)
_mm_blend_pdSSE41intrin.h__m128d _mm_blend_pd (__m128d,__m128d,const int )
_mm_blend_psSSE41intrin.h__m128 _mm_blend_ps (__m128,__m128,const int )
_mm_blendv_epi8SSE41intrin.h__m128i _mm_blendv_epi8 (__m128i,__m128i,__m128i )
_mm_blendv_pdSSE41intrin.h__m128d _mm_blendv_pd(__m128d,__m128d,__m128d)
_mm_blendv_psSSE41intrin.h__m128 _mm_blendv_ps(__m128,__m128,__m128 )
_mm_broadcast_ssAVX [2]immintrin.h__m128 _mm_broadcast_ss(float const *)
_mm_broadcastb_epi8AVX2 [2]immintrin.h__m128i _mm_broadcastb_epi8(__m128i)
_mm_broadcastd_epi32AVX2 [2]immintrin.h__m128i _mm_broadcastd_epi32(__m128i)
_mm_broadcastq_epi64AVX2 [2]immintrin.h__m128i _mm_broadcastq_epi64(__m128i)
_mm_broadcastsd_pdAVX2 [2]immintrin.h__m128d _mm_broadcastsd_pd(__m128d)
_mm_broadcastss_psAVX2 [2]immintrin.h__m128 _mm_broadcastss_ps(__m128)
_mm_broadcastw_epi16AVX2 [2]immintrin.h__m128i _mm_broadcastw_epi16(__m128i)
_mm_castpd_psSSSE3intrin.h__m128 _mm_castpd_ps(__m128d)
_mm_castpd_si128SSSE3intrin.h__m128i _mm_castpd_si128(__m128d)
_mm_castps_pdSSSE3intrin.h__m128d _mm_castps_pd(__m128)
_mm_castps_si128SSSE3intrin.h__m128i _mm_castps_si128(__m128)
_mm_castsi128_pdSSSE3intrin.h__m128d _mm_castsi128_pd(__m128i)
_mm_castsi128_psSSSE3intrin.h__m128 _mm_castsi128_ps(__m128i)
_mm_clflushSSE2intrin.hvoid _mm_clflush(void const *)
_mm_clmulepi64_si128PCLMULQDQ [2]immintrin.h__m128i _mm_clmulepi64_si128 (__m128i,__m128i,const int )
_mm_cmov_si128XOP [1]ammintrin.h__m128i _mm_cmov_si128(__m128i,__m128i,__m128i)
_mm_cmp_pdAVX [2]immintrin.h__m128d _mm_cmp_pd(__m128d,__m128d,const int)
_mm_cmp_psAVX [2]immintrin.h__m128 _mm_cmp_ps(__m128,__m128,const int)
_mm_cmp_sdAVX [2]immintrin.h__m128d _mm_cmp_sd(__m128d,__m128d,const int)
_mm_cmp_ssAVX [2]immintrin.h__m128 _mm_cmp_ss(__m128,__m128,const int)
_mm_cmpeq_epi16SSE2intrin.h__m128i _mm_cmpeq_epi16(__m128i,__m128i)
_mm_cmpeq_epi32SSE2intrin.h__m128i _mm_cmpeq_epi32(__m128i,__m128i)
_mm_cmpeq_epi64SSE41intrin.h__m128i _mm_cmpeq_epi64(__m128i,__m128i )
_mm_cmpeq_epi8SSE2intrin.h__m128i _mm_cmpeq_epi8(__m128i,__m128i)
_mm_cmpeq_pdSSE2intrin.h__m128d _mm_cmpeq_pd(__m128d,__m128d)
_mm_cmpeq_psSSEintrin.h__m128 _mm_cmpeq_ps(__m128,__m128)
_mm_cmpeq_sdSSE2intrin.h__m128d _mm_cmpeq_sd(__m128d,__m128d)
_mm_cmpeq_ssSSEintrin.h__m128 _mm_cmpeq_ss(__m128,__m128)
_mm_cmpestraSSE42intrin.hint _mm_cmpestra(__m128i,int,__m128i,int,const int)
_mm_cmpestrcSSE42intrin.hint _mm_cmpestrc(__m128i,int,__m128i,int,const int)
_mm_cmpestriSSE42intrin.hint _mm_cmpestri(__m128i,int,__m128i,int,const int)
_mm_cmpestrmSSE42intrin.h__m128i _mm_cmpestrm(__m128i,int,__m128i,int,const int)
_mm_cmpestroSSE42intrin.hint _mm_cmpestro(__m128i,int,__m128i,int,const int)
_mm_cmpestrsSSE42intrin.hint _mm_cmpestrs(__m128i,int,__m128i,int,const int)
_mm_cmpestrzSSE42intrin.hint _mm_cmpestrz(__m128i,int,__m128i,int,const int)
_mm_cmpge_pdSSE2intrin.h__m128d _mm_cmpge_pd(__m128d,__m128d)
_mm_cmpge_psSSEintrin.h__m128 _mm_cmpge_ps(__m128,__m128)
_mm_cmpge_sdSSE2intrin.h__m128d _mm_cmpge_sd(__m128d,__m128d)
_mm_cmpge_ssSSEintrin.h__m128 _mm_cmpge_ss(__m128,__m128)
_mm_cmpgt_epi16SSE2intrin.h__m128i _mm_cmpgt_epi16(__m128i,__m128i)
_mm_cmpgt_epi32SSE2intrin.h__m128i _mm_cmpgt_epi32(__m128i,__m128i)
_mm_cmpgt_epi64SSE42intrin.h__m128i _mm_cmpgt_epi64(__m128i,__m128i )
_mm_cmpgt_epi8SSE2intrin.h__m128i _mm_cmpgt_epi8(__m128i,__m128i)
_mm_cmpgt_pdSSE2intrin.h__m128d _mm_cmpgt_pd(__m128d,__m128d)
_mm_cmpgt_psSSEintrin.h__m128 _mm_cmpgt_ps(__m128,__m128)
_mm_cmpgt_sdSSE2intrin.h__m128d _mm_cmpgt_sd(__m128d,__m128d)
_mm_cmpgt_ssSSEintrin.h__m128 _mm_cmpgt_ss(__m128,__m128)
_mm_cmpistraSSE42intrin.hint _mm_cmpistra(__m128i,__m128i,const int)
_mm_cmpistrcSSE42intrin.hint _mm_cmpistrc(__m128i,__m128i,const int)
_mm_cmpistriSSE42intrin.hint _mm_cmpistri(__m128i,__m128i,const int)
_mm_cmpistrmSSE42intrin.h__m128i _mm_cmpistrm(__m128i,__m128i,const int)
_mm_cmpistroSSE42intrin.hint _mm_cmpistro(__m128i,__m128i,const int)
_mm_cmpistrsSSE42intrin.hint _mm_cmpistrs(__m128i,__m128i,const int)
_mm_cmpistrzSSE42intrin.hint _mm_cmpistrz(__m128i,__m128i,const int)
_mm_cmple_pdSSE2intrin.h__m128d _mm_cmple_pd(__m128d,__m128d)
_mm_cmple_psSSEintrin.h__m128 _mm_cmple_ps(__m128,__m128)
_mm_cmple_sdSSE2intrin.h__m128d _mm_cmple_sd(__m128d,__m128d)
_mm_cmple_ssSSEintrin.h__m128 _mm_cmple_ss(__m128,__m128)
_mm_cmplt_epi16SSE2intrin.h__m128i _mm_cmplt_epi16(__m128i,__m128i)
_mm_cmplt_epi32SSE2intrin.h__m128i _mm_cmplt_epi32(__m128i,__m128i)
_mm_cmplt_epi8SSE2intrin.h__m128i _mm_cmplt_epi8(__m128i,__m128i)
_mm_cmplt_pdSSE2intrin.h__m128d _mm_cmplt_pd(__m128d,__m128d)
_mm_cmplt_psSSEintrin.h__m128 _mm_cmplt_ps(__m128,__m128)
_mm_cmplt_sdSSE2intrin.h__m128d _mm_cmplt_sd(__m128d,__m128d)
_mm_cmplt_ssSSEintrin.h__m128 _mm_cmplt_ss(__m128,__m128)
_mm_cmpneq_pdSSE2intrin.h__m128d _mm_cmpneq_pd(__m128d,__m128d)
_mm_cmpneq_psSSEintrin.h__m128 _mm_cmpneq_ps(__m128,__m128)
_mm_cmpneq_sdSSE2intrin.h__m128d _mm_cmpneq_sd(__m128d,__m128d)
_mm_cmpneq_ssSSEintrin.h__m128 _mm_cmpneq_ss(__m128,__m128)
_mm_cmpnge_pdSSE2intrin.h__m128d _mm_cmpnge_pd(__m128d,__m128d)
_mm_cmpnge_psSSEintrin.h__m128 _mm_cmpnge_ps(__m128,__m128)
_mm_cmpnge_sdSSE2intrin.h__m128d _mm_cmpnge_sd(__m128d,__m128d)
_mm_cmpnge_ssSSEintrin.h__m128 _mm_cmpnge_ss(__m128,__m128)
_mm_cmpngt_pdSSE2intrin.h__m128d _mm_cmpngt_pd(__m128d,__m128d)
_mm_cmpngt_psSSEintrin.h__m128 _mm_cmpngt_ps(__m128,__m128)
_mm_cmpngt_sdSSE2intrin.h__m128d _mm_cmpngt_sd(__m128d,__m128d)
_mm_cmpngt_ssSSEintrin.h__m128 _mm_cmpngt_ss(__m128,__m128)
_mm_cmpnle_pdSSE2intrin.h__m128d _mm_cmpnle_pd(__m128d,__m128d)
_mm_cmpnle_psSSEintrin.h__m128 _mm_cmpnle_ps(__m128,__m128)
_mm_cmpnle_sdSSE2intrin.h__m128d _mm_cmpnle_sd(__m128d,__m128d)
_mm_cmpnle_ssSSEintrin.h__m128 _mm_cmpnle_ss(__m128,__m128)
_mm_cmpnlt_pdSSE2intrin.h__m128d _mm_cmpnlt_pd(__m128d,__m128d)
_mm_cmpnlt_psSSEintrin.h__m128 _mm_cmpnlt_ps(__m128,__m128)
_mm_cmpnlt_sdSSE2intrin.h__m128d _mm_cmpnlt_sd(__m128d,__m128d)
_mm_cmpnlt_ssSSEintrin.h__m128 _mm_cmpnlt_ss(__m128,__m128)
_mm_cmpord_pdSSE2intrin.h__m128d _mm_cmpord_pd(__m128d,__m128d)
_mm_cmpord_psSSEintrin.h__m128 _mm_cmpord_ps(__m128,__m128)
_mm_cmpord_sdSSE2intrin.h__m128d _mm_cmpord_sd(__m128d,__m128d)
_mm_cmpord_ssSSEintrin.h__m128 _mm_cmpord_ss(__m128,__m128)
_mm_cmpunord_pdSSE2intrin.h__m128d _mm_cmpunord_pd(__m128d,__m128d)
_mm_cmpunord_psSSEintrin.h__m128 _mm_cmpunord_ps(__m128,__m128)
_mm_cmpunord_sdSSE2intrin.h__m128d _mm_cmpunord_sd(__m128d,__m128d)
_mm_cmpunord_ssSSEintrin.h__m128 _mm_cmpunord_ss(__m128,__m128)
_mm_com_epi16XOP [1]ammintrin.h__m128i _mm_com_epi16(__m128i,__m128i,int)
_mm_com_epi32XOP [1]ammintrin.h__m128i _mm_com_epi32(__m128i,__m128i,int)
_mm_com_epi64XOP [1]ammintrin.h__m128i _mm_com_epi64(__m128i,__m128i,int)
_mm_com_epi8XOP [1]ammintrin.h__m128i _mm_com_epi8(__m128i,__m128i,int)
_mm_com_epu16XOP [1]ammintrin.h__m128i _mm_com_epu16(__m128i,__m128i,int)
_mm_com_epu32XOP [1]ammintrin.h__m128i _mm_com_epu32(__m128i,__m128i,int)
_mm_com_epu64XOP [1]ammintrin.h__m128i _mm_com_epu64(__m128i,__m128i,int)
_mm_com_epu8XOP [1]ammintrin.h__m128i _mm_com_epu8(__m128i,__m128i,int)
_mm_comieq_sdSSE2intrin.hint _mm_comieq_sd(__m128d,__m128d)
_mm_comieq_ssSSEintrin.hint _mm_comieq_ss(__m128,__m128)
_mm_comige_sdSSE2intrin.hint _mm_comige_sd(__m128d,__m128d)
_mm_comige_ssSSEintrin.hint _mm_comige_ss(__m128,__m128)
_mm_comigt_sdSSE2intrin.hint _mm_comigt_sd(__m128d,__m128d)
_mm_comigt_ssSSEintrin.hint _mm_comigt_ss(__m128,__m128)
_mm_comile_sdSSE2intrin.hint _mm_comile_sd(__m128d,__m128d)
_mm_comile_ssSSEintrin.hint _mm_comile_ss(__m128,__m128)
_mm_comilt_sdSSE2intrin.hint _mm_comilt_sd(__m128d,__m128d)
_mm_comilt_ssSSEintrin.hint _mm_comilt_ss(__m128,__m128)
_mm_comineq_sdSSE2intrin.hint _mm_comineq_sd(__m128d,__m128d)
_mm_comineq_ssSSEintrin.hint _mm_comineq_ss(__m128,__m128)
_mm_crc32_u16SSE42intrin.hunsigned int _mm_crc32_u16(unsigned int,unsigned short)
_mm_crc32_u32SSE42intrin.hunsigned int _mm_crc32_u32(unsigned int,unsigned int)
_mm_crc32_u64SSE42intrin.hunsigned __int64 _mm_crc32_u64(unsigned __int64,unsigned __int64)
_mm_crc32_u8SSE42intrin.hunsigned int _mm_crc32_u8(unsigned int,unsigned char)
_mm_cvt_si2ssSSEintrin.h__m128 _mm_cvt_si2ss(__m128,int)
_mm_cvt_ss2siSSEintrin.hint _mm_cvt_ss2si(__m128)
_mm_cvtepi16_epi32SSE41intrin.h__m128i _mm_cvtepi16_epi32(__m128i )
_mm_cvtepi16_epi64SSE41intrin.h__m128i _mm_cvtepi16_epi64(__m128i )
_mm_cvtepi32_epi64SSE41intrin.h__m128i _mm_cvtepi32_epi64(__m128i )
_mm_cvtepi32_pdSSE2intrin.h__m128d _mm_cvtepi32_pd(__m128i)
_mm_cvtepi32_psSSE2intrin.h__m128 _mm_cvtepi32_ps(__m128i)
_mm_cvtepi8_epi16SSE41intrin.h__m128i _mm_cvtepi8_epi16 (__m128i )
_mm_cvtepi8_epi32SSE41intrin.h__m128i _mm_cvtepi8_epi32 (__m128i )
_mm_cvtepi8_epi64SSE41intrin.h__m128i _mm_cvtepi8_epi64 (__m128i )
_mm_cvtepu16_epi32SSE41intrin.h__m128i _mm_cvtepu16_epi32(__m128i )
_mm_cvtepu16_epi64SSE41intrin.h__m128i _mm_cvtepu16_epi64(__m128i )
_mm_cvtepu32_epi64SSE41intrin.h__m128i _mm_cvtepu32_epi64(__m128i )
_mm_cvtepu8_epi16SSE41intrin.h__m128i _mm_cvtepu8_epi16 (__m128i )
_mm_cvtepu8_epi32SSE41intrin.h__m128i _mm_cvtepu8_epi32 (__m128i )
_mm_cvtepu8_epi64SSE41intrin.h__m128i _mm_cvtepu8_epi64 (__m128i )
_mm_cvtpd_epi32SSE2intrin.h__m128i _mm_cvtpd_epi32(__m128d)
_mm_cvtpd_psSSE2intrin.h__m128 _mm_cvtpd_ps(__m128d)
_mm_cvtph_psF16C [2]immintrin.h__m128 _mm_cvtph_ps(__m128i)
_mm_cvtps_epi32SSE2intrin.h__m128i _mm_cvtps_epi32(__m128)
_mm_cvtps_pdSSE2intrin.h__m128d _mm_cvtps_pd(__m128)
_mm_cvtps_phF16C [2]immintrin.h__m128i _mm_cvtps_ph(__m128,const int)
_mm_cvtsd_f64SSSE3intrin.hdouble _mm_cvtsd_f64(__m128d)
_mm_cvtsd_si32SSE2intrin.hint _mm_cvtsd_si32(__m128d)
_mm_cvtsd_si64SSE2intrin.h__int64 _mm_cvtsd_si64(__m128d)
_mm_cvtsd_si64xSSE2intrin.h__int64 _mm_cvtsd_si64x(__m128d a)
_mm_cvtsd_ssSSE2intrin.h__m128 _mm_cvtsd_ss(__m128,__m128d)
_mm_cvtsi128_si32SSE2intrin.hint _mm_cvtsi128_si32(__m128i)
_mm_cvtsi128_si64SSE2intrin.h__int64 _mm_cvtsi128_si64(__m128i)
_mm_cvtsi128_si64xSSE2intrin.h__int64 _mm_cvtsi128_si64x(__m128i a)
_mm_cvtsi32_sdSSE2intrin.h__m128d _mm_cvtsi32_sd(__m128d,int)
_mm_cvtsi32_si128SSE2intrin.h__m128i _mm_cvtsi32_si128(int)
_mm_cvtsi64_sdSSE2intrin.h__m128d _mm_cvtsi64_sd(__m128d,__int64)
_mm_cvtsi64_si128SSE2intrin.h__m128i _mm_cvtsi64_si128(__int64)
_mm_cvtsi64_ssSSEintrin.h__m128 _mm_cvtsi64_ss(__m128,__int64)
_mm_cvtsi64x_sdSSE2intrin.h__m128d _mm_cvtsi64x_sd(__m128d a,__int64 b)
_mm_cvtsi64x_si128SSE2intrin.h__m128i _mm_cvtsi64x_si128(__int64 a)
_mm_cvtsi64x_ssSSE2intrin.h__m128 _mm_cvtsi64x_ss(__m128 a,__int64 b)
_mm_cvtss_f32SSSE3intrin.hfloat _mm_cvtss_f32(__m128)
_mm_cvtss_sdSSE2intrin.h__m128d _mm_cvtss_sd(__m128d,__m128)
_mm_cvtss_si64SSEintrin.h__int64 _mm_cvtss_si64(__m128)
_mm_cvtss_si64xSSE2intrin.h__int64 _mm_cvtss_si64x(__m128 a)
_mm_cvtt_ss2siSSEintrin.hint _mm_cvtt_ss2si(__m128)
_mm_cvttpd_epi32SSE2intrin.h__m128i _mm_cvttpd_epi32(__m128d)
_mm_cvttps_epi32SSE2intrin.h__m128i _mm_cvttps_epi32(__m128)
_mm_cvttsd_si32SSE2intrin.hint _mm_cvttsd_si32(__m128d)
_mm_cvttsd_si64SSE2intrin.h__int64 _mm_cvttsd_si64(__m128d)
_mm_cvttsd_si64xSSE2intrin.h__int64 _mm_cvttsd_si64x(__m128d a)
_mm_cvttss_si64SSE2intrin.h__int64 _mm_cvttss_si64(__m128)
_mm_cvttss_si64xSSE2intrin.h__int64 _mm_cvttss_si64x(__m128 a)
_mm_div_pdSSE2intrin.h__m128d _mm_div_pd(__m128d,__m128d)
_mm_div_psSSEintrin.h__m128 _mm_div_ps(__m128,__m128)
_mm_div_sdSSE2intrin.h__m128d _mm_div_sd(__m128d,__m128d)
_mm_div_ssSSEintrin.h__m128 _mm_div_ss(__m128,__m128)
_mm_dp_pdSSE41intrin.h__m128d _mm_dp_pd(__m128d,__m128d,const int )
_mm_dp_psSSE41intrin.h__m128 _mm_dp_ps(__m128,__m128,const int )
_mm_extract_epi16SSE2intrin.hint _mm_extract_epi16(__m128i,int)
_mm_extract_epi32SSE41intrin.hint _mm_extract_epi32(__m128i,const int )
_mm_extract_epi64SSE41intrin.h__int64 _mm_extract_epi64(__m128i,const int )
_mm_extract_epi8SSE41intrin.hint _mm_extract_epi8 (__m128i,const int )
_mm_extract_psSSE41intrin.hint _mm_extract_ps(__m128,const int )
_mm_extract_si64SSE4aintrin.h__m128i _mm_extract_si64(__m128i,__m128i)
_mm_extracti_si64SSE4aintrin.h__m128i _mm_extracti_si64(__m128i,int,int)
_mm_fmadd_pdFMA [2]immintrin.h__m128d _mm_fmadd_pd (__m128d a,__m128d b,__m128d c)
_mm_fmadd_psFMA [2]immintrin.h__m128 _mm_fmadd_ps (__m128 a,__m128 b,__m128 c)
_mm_fmadd_sdFMA [2]immintrin.h__m128d _mm_fmadd_sd (__m128d a,__m128d b,__m128d c)
_mm_fmadd_ssFMA [2]immintrin.h__m128 _mm_fmadd_ss (__m128 a,__m128 b,__m128 c)
_mm_fmaddsub_pdFMA [2]immintrin.h__m128d _mm_fmaddsub_pd (__m128d a,__m128d b,__m128d c)
_mm_fmaddsub_psFMA [2]immintrin.h__m128 _mm_fmaddsub_ps (__m128 a,__m128 b,__m128 c)
_mm_fmsub_pdFMA [2]immintrin.h__m128d _mm_fmsub_pd (__m128d a,__m128d b,__m128d c)
_mm_fmsub_psFMA [2]immintrin.h__m128 _mm_fmsub_ps (__m128 a,__m128 b,__m128 c)
_mm_fmsub_sdFMA [2]immintrin.h__m128d _mm_fmsub_sd (__m128d a,__m128d b,__m128d c)
_mm_fmsub_ssFMA [2]immintrin.h__m128 _mm_fmsub_ss (__m128 a,__m128 b,__m128 c)
_mm_fmsubadd_pdFMA [2]immintrin.h__m128d _mm_fmsubadd_pd (__m128d a,__m128d b,__m128d c)
_mm_fmsubadd_psFMA [2]immintrin.h__m128 _mm_fmsubadd_ps (__m128 a,__m128 b,__m128 c)
_mm_fnmadd_pdFMA [2]immintrin.h__m128d _mm_fnmadd_pd (__m128d a,__m128d b,__m128d c)
_mm_fnmadd_psFMA [2]immintrin.h__m128 _mm_fnmadd_ps (__m128 a,__m128 b,__m128 c)
_mm_fnmadd_sdFMA [2]immintrin.h__m128d _mm_fnmadd_sd (__m128d a,__m128d b,__m128d c)
_mm_fnmadd_ssFMA [2]immintrin.h__m128 _mm_fnmadd_ss (__m128 a,__m128 b,__m128 c)
_mm_fnmsub_pdFMA [2]immintrin.h__m128d _mm_fnmsub_pd (__m128d a,__m128d b,__m128d c)
_mm_fnmsub_psFMA [2]immintrin.h__m128 _mm_fnmsub_ps (__m128 a,__m128 b,__m128 c)
_mm_fnmsub_sdFMA [2]immintrin.h__m128d _mm_fnmsub_sd (__m128d a,__m128d b,__m128d c)
_mm_fnmsub_ssFMA [2]immintrin.h__m128 _mm_fnmsub_ss (__m128 a,__m128 b,__m128 c)
_mm_frcz_pdXOP [1]ammintrin.h__m128d _mm_frcz_pd(__m128d)
_mm_frcz_psXOP [1]ammintrin.h__m128 _mm_frcz_ps(__m128)
_mm_frcz_sdXOP [1]ammintrin.h__m128d _mm_frcz_sd(__m128d,__m128d)
_mm_frcz_ssXOP [1]ammintrin.h__m128 _mm_frcz_ss(__m128,__m128)
_mm_getcsrSSEintrin.hunsigned int _mm_getcsr(void)
_mm_hadd_epi16SSSE3intrin.h__m128i _mm_hadd_epi16(__m128i,__m128i)
_mm_hadd_epi32SSSE3intrin.h__m128i _mm_hadd_epi32(__m128i,__m128i)
_mm_hadd_pdSSE3intrin.h__m128d _mm_hadd_pd(__m128d,__m128d)
_mm_hadd_psSSE3intrin.h__m128 _mm_hadd_ps(__m128,__m128)
_mm_haddd_epi16XOP [1]ammintrin.h__m128i _mm_haddd_epi16(__m128i)
_mm_haddd_epi8XOP [1]ammintrin.h__m128i _mm_haddd_epi8(__m128i)
_mm_haddd_epu16XOP [1]ammintrin.h__m128i _mm_haddd_epu16(__m128i)
_mm_haddd_epu8XOP [1]ammintrin.h__m128i _mm_haddd_epu8(__m128i)
_mm_haddq_epi16XOP [1]ammintrin.h__m128i _mm_haddq_epi16(__m128i)
_mm_haddq_epi32XOP [1]ammintrin.h__m128i _mm_haddq_epi32(__m128i)
_mm_haddq_epi8XOP [1]ammintrin.h__m128i _mm_haddq_epi8(__m128i)
_mm_haddq_epu16XOP [1]ammintrin.h__m128i _mm_haddq_epu16(__m128i)
_mm_haddq_epu32XOP [1]ammintrin.h__m128i _mm_haddq_epu32(__m128i)
_mm_haddq_epu8XOP [1]ammintrin.h__m128i _mm_haddq_epu8(__m128i)
_mm_hadds_epi16SSSE3intrin.h__m128i _mm_hadds_epi16(__m128i,__m128i)
_mm_haddw_epi8XOP [1]ammintrin.h__m128i _mm_haddw_epi8(__m128i)
_mm_haddw_epu8XOP [1]ammintrin.h__m128i _mm_haddw_epu8(__m128i)
_mm_hsub_epi16SSSE3intrin.h__m128i _mm_hsub_epi16(__m128i,__m128i)
_mm_hsub_epi32SSSE3intrin.h__m128i _mm_hsub_epi32(__m128i,__m128i)
_mm_hsub_pdSSE3intrin.h__m128d _mm_hsub_pd(__m128d,__m128d)
_mm_hsub_psSSE3intrin.h__m128 _mm_hsub_ps(__m128,__m128)
_mm_hsubd_epi16XOP [1]ammintrin.h__m128i _mm_hsubd_epi16(__m128i)
_mm_hsubq_epi32XOP [1]ammintrin.h__m128i _mm_hsubq_epi32(__m128i)
_mm_hsubs_epi16SSSE3intrin.h__m128i _mm_hsubs_epi16(__m128i,__m128i)
_mm_hsubw_epi8XOP [1]ammintrin.h__m128i _mm_hsubw_epi8(__m128i)
_mm_i32gather_epi32AVX2 [2]immintrin.h__m128i _mm_i32gather_epi32(int const *base,__m128i index,const int scale)
_mm_i32gather_epi64AVX2 [2]immintrin.h__m128i _mm_i32gather_epi64(__int64 const *base,__m128i index,const int scale)
_mm_i32gather_pdAVX2 [2]immintrin.h__m128d _mm_i32gather_pd(double const *base,__m128i index,const int scale)
_mm_i32gather_psAVX2 [2]immintrin.h__m128 _mm_i32gather_ps(float const *base,__m128i index,const int scale)
_mm_i64gather_epi32AVX2 [2]immintrin.h__m128i _mm_i64gather_epi32(int const *base,__m128i index,const int scale)
_mm_i64gather_epi64AVX2 [2]immintrin.h__m128i _mm_i64gather_epi64(__int64 const *base,__m128i index,const int scale)
_mm_i64gather_pdAVX2 [2]immintrin.h__m128d _mm_i64gather_pd(double const *base,__m128i index,const int scale)
_mm_i64gather_psAVX2 [2]immintrin.h__m128 _mm_i64gather_ps(float const *base,__m128i index,const int scale)
_mm_insert_epi16SSE2intrin.h__m128i _mm_insert_epi16(__m128i,int,int)
_mm_insert_epi32SSE41intrin.h__m128i _mm_insert_epi32(__m128i,int,const int )
_mm_insert_epi64SSE41intrin.h__m128i _mm_insert_epi64(__m128i,__int64,const int )
_mm_insert_epi8SSE41intrin.h__m128i _mm_insert_epi8 (__m128i,int,const int )
_mm_insert_psSSE41intrin.h__m128 _mm_insert_ps(__m128,__m128,const int )
_mm_insert_si64SSE4aintrin.h__m128i _mm_insert_si64(__m128i,__m128i)
_mm_inserti_si64SSE4aintrin.h__m128i _mm_inserti_si64(__m128i,__m128i,int,int)
_mm_lddqu_si128SSE3intrin.h__m128i _mm_lddqu_si128(__m128i const*)
_mm_lfenceSSE2intrin.hvoid _mm_lfence(void)
_mm_load_pdSSE2intrin.h__m128d _mm_load_pd(double*)
_mm_load_psSSEintrin.h__m128 _mm_load_ps(float*)
_mm_load_ps1SSEintrin.h__m128 _mm_load_ps1(float*)
_mm_load_sdSSE2intrin.h__m128d _mm_load_sd(double*)
_mm_load_si128SSE2intrin.h__m128i _mm_load_si128(__m128i*)
_mm_load_ssSSEintrin.h__m128 _mm_load_ss(float*)
_mm_load1_pdSSE2intrin.h__m128d _mm_load1_pd(double*)
_mm_loaddup_pdSSE3intrin.h__m128d _mm_loaddup_pd(double const*)
_mm_loadh_pdSSE2intrin.h__m128d _mm_loadh_pd(__m128d,double*)
_mm_loadh_piSSEintrin.h__m128 _mm_loadh_pi(__m128,__m64*)
_mm_loadl_epi64SSE2intrin.h__m128i _mm_loadl_epi64(__m128i*)
_mm_loadl_pdSSE2intrin.h__m128d _mm_loadl_pd(__m128d,double*)
_mm_loadl_piSSEintrin.h__m128 _mm_loadl_pi(__m128,__m64*)
_mm_loadr_pdSSE2intrin.h__m128d _mm_loadr_pd(double*)
_mm_loadr_psSSEintrin.h__m128 _mm_loadr_ps(float*)
_mm_loadu_pdSSE2intrin.h__m128d _mm_loadu_pd(double*)
_mm_loadu_psSSEintrin.h__m128 _mm_loadu_ps(float*)
_mm_loadu_si128SSE2intrin.h__m128i _mm_loadu_si128(__m128i*)
_mm_macc_epi16XOP [1]ammintrin.h__m128i _mm_macc_epi16(__m128i,__m128i,__m128i)
_mm_macc_epi32XOP [1]ammintrin.h__m128i _mm_macc_epi32(__m128i,__m128i,__m128i)
_mm_macc_pdFMA4 [1]ammintrin.h__m128d _mm_macc_pd(__m128d,__m128d,__m128d)
_mm_macc_psFMA4 [1]ammintrin.h__m128 _mm_macc_ps(__m128,__m128,__m128)
_mm_macc_sdFMA4 [1]ammintrin.h__m128d _mm_macc_sd(__m128d,__m128d,__m128d)
_mm_macc_ssFMA4 [1]ammintrin.h__m128 _mm_macc_ss(__m128,__m128,__m128)
_mm_maccd_epi16XOP [1]ammintrin.h__m128i _mm_maccd_epi16(__m128i,__m128i,__m128i)
_mm_macchi_epi32XOP [1]ammintrin.h__m128i _mm_macchi_epi32(__m128i,__m128i,__m128i)
_mm_macclo_epi32XOP [1]ammintrin.h__m128i _mm_macclo_epi32(__m128i,__m128i,__m128i)
_mm_maccs_epi16XOP [1]ammintrin.h__m128i _mm_maccs_epi16(__m128i,__m128i,__m128i)
_mm_maccs_epi32XOP [1]ammintrin.h__m128i _mm_maccs_epi32(__m128i,__m128i,__m128i)
_mm_maccsd_epi16XOP [1]ammintrin.h__m128i _mm_maccsd_epi16(__m128i,__m128i,__m128i)
_mm_maccshi_epi32XOP [1]ammintrin.h__m128i _mm_maccshi_epi32(__m128i,__m128i,__m128i)
_mm_maccslo_epi32XOP [1]ammintrin.h__m128i _mm_maccslo_epi32(__m128i,__m128i,__m128i)
_mm_madd_epi16SSE2intrin.h__m128i _mm_madd_epi16(__m128i,__m128i)
_mm_maddd_epi16XOP [1]ammintrin.h__m128i _mm_maddd_epi16(__m128i,__m128i,__m128i)
_mm_maddsd_epi16XOP [1]ammintrin.h__m128i _mm_maddsd_epi16(__m128i,__m128i,__m128i)
_mm_maddsub_pdFMA4 [1]ammintrin.h__m128d _mm_maddsub_pd(__m128d,__m128d,__m128d)
_mm_maddsub_psFMA4 [1]ammintrin.h__m128 _mm_maddsub_ps(__m128,__m128,__m128)
_mm_maddubs_epi16SSSE3intrin.h__m128i _mm_maddubs_epi16(__m128i,__m128i)
_mm_mask_i32gather_epi32AVX2 [2]immintrin.h__m128i _mm_mask_i32gather_epi32(__m128i src,int const *base,__m128i index,__m128i mask,const int scale)
_mm_mask_i32gather_epi64AVX2 [2]immintrin.h__m128i _mm_mask_i32gather_epi64(__m128i src,__int64 const *base,__m128i index,__m128i mask,const int scale)
_mm_mask_i32gather_pdAVX2 [2]immintrin.h__m128d _mm_mask_i32gather_pd(__m128d src,double const *base,__m128i index,__m128d mask,const int scale)
_mm_mask_i32gather_psAVX2 [2]immintrin.h__m128 _mm_mask_i32gather_ps(__m128 src,float const *base,__m128i index,__m128 mask,const int scale)
_mm_mask_i64gather_epi32AVX2 [2]immintrin.h__m128i _mm_mask_i64gather_epi32(__m128i src,int const *base,__m128i index,__m128i mask,const int scale)
_mm_mask_i64gather_epi64AVX2 [2]immintrin.h__m128i _mm_mask_i64gather_epi64(__m128i src,__int64 const *base,__m128i index,__m128i mask,const int scale)
_mm_mask_i64gather_pdAVX2 [2]immintrin.h__m128d _mm_mask_i64gather_pd(__m128d src,double const *base,__m128i index,__m128d mask,const int scale)
_mm_mask_i64gather_psAVX2 [2]immintrin.h__m128 _mm_mask_i64gather_ps(__m128 src,float const *base,__m128i index,__m128 mask,const int scale)
_mm_maskload_epi32AVX2 [2]immintrin.h__m128i _mm_maskload_epi32(int const *,__m128i)
_mm_maskload_epi64AVX2 [2]immintrin.h__m128i _mm_maskload_epi64( __int64 const *,__m128i)
_mm_maskload_pdAVX [2]immintrin.h__m128d _mm_maskload_pd(double const *,__m128i)
_mm_maskload_psAVX [2]immintrin.h__m128 _mm_maskload_ps(float const *,__m128i)
_mm_maskmoveu_si128SSE2intrin.hvoid _mm_maskmoveu_si128(__m128i,__m128i,char*)
_mm_maskstore_epi32AVX2 [2]immintrin.hvoid _mm_maskstore_epi32(int *,__m128i,__m128i)
_mm_maskstore_epi64AVX2 [2]immintrin.hvoid _mm_maskstore_epi64(__int64 *,__m128i,__m128i)
_mm_maskstore_pdAVX [2]immintrin.hvoid _mm_maskstore_pd(double *,__m128i,__m128d)
_mm_maskstore_psAVX [2]immintrin.hvoid _mm_maskstore_ps(float *,__m128i,__m128)
_mm_max_epi16SSE2intrin.h__m128i _mm_max_epi16(__m128i,__m128i)
_mm_max_epi32SSE41intrin.h__m128i _mm_max_epi32(__m128i,__m128i )
_mm_max_epi8SSE41intrin.h__m128i _mm_max_epi8 (__m128i,__m128i )
_mm_max_epu16SSE41intrin.h__m128i _mm_max_epu16(__m128i,__m128i )
_mm_max_epu32SSE41intrin.h__m128i _mm_max_epu32(__m128i,__m128i )
_mm_max_epu8SSE2intrin.h__m128i _mm_max_epu8(__m128i,__m128i)
_mm_max_pdSSE2intrin.h__m128d _mm_max_pd(__m128d,__m128d)
_mm_max_psSSEintrin.h__m128 _mm_max_ps(__m128,__m128)
_mm_max_sdSSE2intrin.h__m128d _mm_max_sd(__m128d,__m128d)
_mm_max_ssSSEintrin.h__m128 _mm_max_ss(__m128,__m128)
_mm_mfenceSSE2intrin.hvoid _mm_mfence(void)
_mm_min_epi16SSE2intrin.h__m128i _mm_min_epi16(__m128i,__m128i)
_mm_min_epi32SSE41intrin.h__m128i _mm_min_epi32(__m128i,__m128i )
_mm_min_epi8SSE41intrin.h__m128i _mm_min_epi8 (__m128i,__m128i )
_mm_min_epu16SSE41intrin.h__m128i _mm_min_epu16(__m128i,__m128i )
_mm_min_epu32SSE41intrin.h__m128i _mm_min_epu32(__m128i,__m128i )
_mm_min_epu8SSE2intrin.h__m128i _mm_min_epu8(__m128i,__m128i)
_mm_min_pdSSE2intrin.h__m128d _mm_min_pd(__m128d,__m128d)
_mm_min_psSSEintrin.h__m128 _mm_min_ps(__m128,__m128)
_mm_min_sdSSE2intrin.h__m128d _mm_min_sd(__m128d,__m128d)
_mm_min_ssSSEintrin.h__m128 _mm_min_ss(__m128,__m128)
_mm_minpos_epu16SSE41intrin.h__m128i _mm_minpos_epu16(__m128i )
_mm_monitorSSE3intrin.hvoid _mm_monitor(void const*,unsigned int,unsigned int)
_mm_move_epi64SSE2intrin.h__m128i _mm_move_epi64(__m128i)
_mm_move_sdSSE2intrin.h__m128d _mm_move_sd(__m128d,__m128d)
_mm_move_ssSSEintrin.h__m128 _mm_move_ss(__m128,__m128)
_mm_movedup_pdSSE3intrin.h__m128d _mm_movedup_pd(__m128d)
_mm_movehdup_psSSE3intrin.h__m128 _mm_movehdup_ps(__m128)
_mm_movehl_psSSEintrin.h__m128 _mm_movehl_ps(__m128,__m128)
_mm_moveldup_psSSE3intrin.h__m128 _mm_moveldup_ps(__m128)
_mm_movelh_psSSEintrin.h__m128 _mm_movelh_ps(__m128,__m128)
_mm_movemask_epi8SSE2intrin.hint _mm_movemask_epi8(__m128i)
_mm_movemask_pdSSE2intrin.hint _mm_movemask_pd(__m128d)
_mm_movemask_psSSEintrin.hint _mm_movemask_ps(__m128)
_mm_mpsadbw_epu8SSE41intrin.h__m128i _mm_mpsadbw_epu8(__m128i s1,__m128i,const int)
_mm_msub_pdFMA4 [1]ammintrin.h__m128d _mm_msub_pd(__m128d,__m128d,__m128d)
_mm_msub_psFMA4 [1]ammintrin.h__m128 _mm_msub_ps(__m128,__m128,__m128)
_mm_msub_sdFMA4 [1]ammintrin.h__m128d _mm_msub_sd(__m128d,__m128d,__m128d)
_mm_msub_ssFMA4 [1]ammintrin.h__m128 _mm_msub_ss(__m128,__m128,__m128)
_mm_msubadd_pdFMA4 [1]ammintrin.h__m128d _mm_msubadd_pd(__m128d,__m128d,__m128d)
_mm_msubadd_psFMA4 [1]ammintrin.h__m128 _mm_msubadd_ps(__m128,__m128,__m128)
_mm_mul_epi32SSE41intrin.h__m128i _mm_mul_epi32(__m128i,__m128i )
_mm_mul_epu32SSE2intrin.h__m128i _mm_mul_epu32(__m128i,__m128i)
_mm_mul_pdSSE2intrin.h__m128d _mm_mul_pd(__m128d,__m128d)
_mm_mul_psSSEintrin.h__m128 _mm_mul_ps(__m128,__m128)
_mm_mul_sdSSE2intrin.h__m128d _mm_mul_sd(__m128d,__m128d)
_mm_mul_ssSSEintrin.h__m128 _mm_mul_ss(__m128,__m128)
_mm_mulhi_epi16SSE2intrin.h__m128i _mm_mulhi_epi16(__m128i,__m128i)
_mm_mulhi_epu16SSE2intrin.h__m128i _mm_mulhi_epu16(__m128i,__m128i)
_mm_mulhrs_epi16SSSE3intrin.h__m128i _mm_mulhrs_epi16(__m128i,__m128i)
_mm_mullo_epi16SSE2intrin.h__m128i _mm_mullo_epi16(__m128i,__m128i)
_mm_mullo_epi32SSE41intrin.h__m128i _mm_mullo_epi32(__m128i,__m128i )
_mm_mwaitSSE3intrin.hvoid _mm_mwait(unsigned int,unsigned int)
_mm_nmacc_pdFMA4 [1]ammintrin.h__m128d _mm_nmacc_pd(__m128d,__m128d,__m128d)
_mm_nmacc_psFMA4 [1]ammintrin.h__m128 _mm_nmacc_ps(__m128,__m128,__m128)
_mm_nmacc_sdFMA4 [1]ammintrin.h__m128d _mm_nmacc_sd(__m128d,__m128d,__m128d)
_mm_nmacc_ssFMA4 [1]ammintrin.h__m128 _mm_nmacc_ss(__m128,__m128,__m128)
_mm_nmsub_pdFMA4 [1]ammintrin.h__m128d _mm_nmsub_pd(__m128d,__m128d,__m128d)
_mm_nmsub_psFMA4 [1]ammintrin.h__m128 _mm_nmsub_ps(__m128,__m128,__m128)
_mm_nmsub_sdFMA4 [1]ammintrin.h__m128d _mm_nmsub_sd(__m128d,__m128d,__m128d)
_mm_nmsub_ssFMA4 [1]ammintrin.h__m128 _mm_nmsub_ss(__m128,__m128,__m128)
_mm_or_pdSSE2intrin.h__m128d _mm_or_pd(__m128d,__m128d)
_mm_or_psSSEintrin.h__m128 _mm_or_ps(__m128,__m128)
_mm_or_si128SSE2intrin.h__m128i _mm_or_si128(__m128i,__m128i)
_mm_packs_epi16SSE2intrin.h__m128i _mm_packs_epi16(__m128i,__m128i)
_mm_packs_epi32SSE2intrin.h__m128i _mm_packs_epi32(__m128i,__m128i)
_mm_packus_epi16SSE2intrin.h__m128i _mm_packus_epi16(__m128i,__m128i)
_mm_packus_epi32SSE41intrin.h__m128i _mm_packus_epi32(__m128i,__m128i )
_mm_pauseSSE2intrin.hvoid _mm_pause(void)
_mm_perm_epi8XOP [1]ammintrin.h__m128i _mm_perm_epi8(__m128i,__m128i,__m128i)
_mm_permute_pdAVX [2]immintrin.h__m128d _mm_permute_pd(__m128d,int)
_mm_permute_psAVX [2]immintrin.h__m128 _mm_permute_ps(__m128,int)
_mm_permute2_pdXOP [1]ammintrin.h__m128d _mm_permute2_pd(__m128d,__m128d,__m128i,int)
_mm_permute2_psXOP [1]ammintrin.h__m128 _mm_permute2_ps(__m128,__m128,__m128i,int)
_mm_permutevar_pdAVX [2]immintrin.h__m128d _mm_permutevar_pd(__m128d,__m128i)
_mm_permutevar_psAVX [2]immintrin.h__m128 _mm_permutevar_ps(__m128,__m128i)
_mm_popcnt_u32POPCNTintrin.hint _mm_popcnt_u32(unsigned int)
_mm_popcnt_u64POPCNTintrin.h__int64 _mm_popcnt_u64(unsigned __int64)
_mm_prefetchSSEintrin.hvoid _mm_prefetch(char*,int)
_mm_rcp_psSSEintrin.h__m128 _mm_rcp_ps(__m128)
_mm_rcp_ssSSEintrin.h__m128 _mm_rcp_ss(__m128)
_mm_rot_epi16XOP [1]ammintrin.h__m128i _mm_rot_epi16(__m128i,__m128i)
_mm_rot_epi32XOP [1]ammintrin.h__m128i _mm_rot_epi32(__m128i,__m128i)
_mm_rot_epi64XOP [1]ammintrin.h__m128i _mm_rot_epi64(__m128i,__m128i)
_mm_rot_epi8XOP [1]ammintrin.h__m128i _mm_rot_epi8(__m128i,__m128i)
_mm_roti_epi16XOP [1]ammintrin.h__m128i _mm_roti_epi16(__m128i,int)
_mm_roti_epi32XOP [1]ammintrin.h__m128i _mm_roti_epi32(__m128i,int)
_mm_roti_epi64XOP [1]ammintrin.h__m128i _mm_roti_epi64(__m128i,int)
_mm_roti_epi8XOP [1]ammintrin.h__m128i _mm_roti_epi8(__m128i,int)
_mm_round_pdSSE41intrin.h__m128d _mm_round_pd(__m128d,const int )
_mm_round_psSSE41intrin.h__m128 _mm_round_ps(__m128,const int )
_mm_round_sdSSE41intrin.h__m128d _mm_round_sd(__m128d,__m128d,const int )
_mm_round_ssSSE41intrin.h__m128 _mm_round_ss(__m128,__m128,const int )
_mm_rsqrt_psSSEintrin.h__m128 _mm_rsqrt_ps(__m128)
_mm_rsqrt_ssSSEintrin.h__m128 _mm_rsqrt_ss(__m128)
_mm_sad_epu8SSE2intrin.h__m128i _mm_sad_epu8(__m128i,__m128i)
_mm_set_epi16SSE2intrin.h__m128i _mm_set_epi16(short,short,short,short,short,short,short,short)
_mm_set_epi32SSE2intrin.h__m128i _mm_set_epi32(int,int,int,int)
_mm_set_epi64xSSE2intrin.h__m128i _mm_set_epi64x(__int64 i1,__int64 i0)
_mm_set_epi8SSE2intrin.h__m128i _mm_set_epi8(char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char)
_mm_set_pdSSE2intrin.h__m128d _mm_set_pd(double,double)
_mm_set_psSSEintrin.h__m128 _mm_set_ps(float,float,float,float)
_mm_set_ps1SSEintrin.h__m128 _mm_set_ps1(float)
_mm_set_sdSSE2intrin.h__m128d _mm_set_sd(double)
_mm_set_ssSSEintrin.h__m128 _mm_set_ss(float)
_mm_set1_epi16SSE2intrin.h__m128i _mm_set1_epi16(short)
_mm_set1_epi32SSE2intrin.h__m128i _mm_set1_epi32(int)
_mm_set1_epi64xSSE2intrin.h__m128i _mm_set1_epi64x(__int64 i)
_mm_set1_epi8SSE2intrin.h__m128i _mm_set1_epi8(char)
_mm_set1_pdSSE2intrin.h__m128d _mm_set1_pd(double)
_mm_setcsrSSEintrin.hvoid _mm_setcsr(unsigned int)
_mm_setl_epi64SSE2intrin.h__m128i _mm_setl_epi64(__m128i)
_mm_setr_epi16SSE2intrin.h__m128i _mm_setr_epi16(short,short,short,short,short,short,short,short)
_mm_setr_epi32SSE2intrin.h__m128i _mm_setr_epi32(int,int,int,int)
_mm_setr_epi8SSE2intrin.h__m128i _mm_setr_epi8(char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char)
_mm_setr_pdSSE2intrin.h__m128d _mm_setr_pd(double,double)
_mm_setr_psSSEintrin.h__m128 _mm_setr_ps(float,float,float,float)
_mm_setzero_pdSSE2intrin.h__m128d _mm_setzero_pd(void)
_mm_setzero_psSSEintrin.h__m128 _mm_setzero_ps(void)
_mm_setzero_si128SSE2intrin.h__m128i _mm_setzero_si128(void)
_mm_sfenceSSEintrin.hvoid _mm_sfence(void)
_mm_sha_epi16XOP [1]ammintrin.h__m128i _mm_sha_epi16(__m128i,__m128i)
_mm_sha_epi32XOP [1]ammintrin.h__m128i _mm_sha_epi32(__m128i,__m128i)
_mm_sha_epi64XOP [1]ammintrin.h__m128i _mm_sha_epi64(__m128i,__m128i)
_mm_sha_epi8XOP [1]ammintrin.h__m128i _mm_sha_epi8(__m128i,__m128i)
_mm_shl_epi16XOP [1]ammintrin.h__m128i _mm_shl_epi16(__m128i,__m128i)
_mm_shl_epi32XOP [1]ammintrin.h__m128i _mm_shl_epi32(__m128i,__m128i)
_mm_shl_epi64XOP [1]ammintrin.h__m128i _mm_shl_epi64(__m128i,__m128i)
_mm_shl_epi8XOP [1]ammintrin.h__m128i _mm_shl_epi8(__m128i,__m128i)
_mm_shuffle_epi32SSE2intrin.h__m128i _mm_shuffle_epi32(__m128i,int)
_mm_shuffle_epi8SSSE3intrin.h__m128i _mm_shuffle_epi8(__m128i,__m128i)
_mm_shuffle_pdSSE2intrin.h__m128d _mm_shuffle_pd(__m128d,__m128d,int)
_mm_shuffle_psSSEintrin.h__m128 _mm_shuffle_ps(__m128,__m128,unsigned int)
_mm_shufflehi_epi16SSE2intrin.h__m128i _mm_shufflehi_epi16(__m128i,int)
_mm_shufflelo_epi16SSE2intrin.h__m128i _mm_shufflelo_epi16(__m128i,int)
_mm_sign_epi16SSSE3intrin.h__m128i _mm_sign_epi16(__m128i,__m128i)
_mm_sign_epi32SSSE3intrin.h__m128i _mm_sign_epi32(__m128i,__m128i)
_mm_sign_epi8SSSE3intrin.h__m128i _mm_sign_epi8(__m128i,__m128i)
_mm_sll_epi16SSE2intrin.h__m128i _mm_sll_epi16(__m128i,__m128i)
_mm_sll_epi32SSE2intrin.h__m128i _mm_sll_epi32(__m128i,__m128i)
_mm_sll_epi64SSE2intrin.h__m128i _mm_sll_epi64(__m128i,__m128i)
_mm_slli_epi16SSE2intrin.h__m128i _mm_slli_epi16(__m128i,int)
_mm_slli_epi32SSE2intrin.h__m128i _mm_slli_epi32(__m128i,int)
_mm_slli_epi64SSE2intrin.h__m128i _mm_slli_epi64(__m128i,int)
_mm_slli_si128SSE2intrin.h__m128i _mm_slli_si128(__m128i,int)
_mm_sllv_epi32AVX2 [2]immintrin.h__m128i _mm_sllv_epi32(__m128i,__m128i)
_mm_sllv_epi64AVX2 [2]immintrin.h__m128i _mm_sllv_epi64(__m128i,__m128i)
_mm_sqrt_pdSSE2intrin.h__m128d _mm_sqrt_pd(__m128d)
_mm_sqrt_psSSEintrin.h__m128 _mm_sqrt_ps(__m128)
_mm_sqrt_sdSSE2intrin.h__m128d _mm_sqrt_sd(__m128d,__m128d)
_mm_sqrt_ssSSEintrin.h__m128 _mm_sqrt_ss(__m128)
_mm_sra_epi16SSE2intrin.h__m128i _mm_sra_epi16(__m128i,__m128i)
_mm_sra_epi32SSE2intrin.h__m128i _mm_sra_epi32(__m128i,__m128i)
_mm_srai_epi16SSE2intrin.h__m128i _mm_srai_epi16(__m128i,int)
_mm_srai_epi32SSE2intrin.h__m128i _mm_srai_epi32(__m128i,int)
_mm_srav_epi32AVX2 [2]immintrin.h__m128i _mm_srav_epi32(__m128i,__m128i)
_mm_srl_epi16SSE2intrin.h__m128i _mm_srl_epi16(__m128i,__m128i)
_mm_srl_epi32SSE2intrin.h__m128i _mm_srl_epi32(__m128i,__m128i)
_mm_srl_epi64SSE2intrin.h__m128i _mm_srl_epi64(__m128i,__m128i)
_mm_srli_epi16SSE2intrin.h__m128i _mm_srli_epi16(__m128i,int)
_mm_srli_epi32SSE2intrin.h__m128i _mm_srli_epi32(__m128i,int)
_mm_srli_epi64SSE2intrin.h__m128i _mm_srli_epi64(__m128i,int)
_mm_srli_si128SSE2intrin.h__m128i _mm_srli_si128(__m128i,int)
_mm_srlv_epi32AVX2 [2]immintrin.h__m128i _mm_srlv_epi32(__m128i,__m128i)
_mm_srlv_epi64AVX2 [2]immintrin.h__m128i _mm_srlv_epi64(__m128i,__m128i)
_mm_store_pdSSE2intrin.hvoid _mm_store_pd(double*,__m128d)
_mm_store_psSSEintrin.hvoid _mm_store_ps(float*,__m128)
_mm_store_ps1SSEintrin.hvoid _mm_store_ps1(float*,__m128)
_mm_store_sdSSE2intrin.hvoid _mm_store_sd(double*,__m128d)
_mm_store_si128SSE2intrin.hvoid _mm_store_si128(__m128i*,__m128i)
_mm_store_ssSSEintrin.hvoid _mm_store_ss(float*,__m128)
_mm_store1_pdSSE2intrin.hvoid _mm_store1_pd(double*,__m128d)
_mm_storeh_pdSSE2intrin.hvoid _mm_storeh_pd(double*,__m128d)
_mm_storeh_piSSEintrin.hvoid _mm_storeh_pi(__m64*,__m128)
_mm_storel_epi64SSE2intrin.hvoid _mm_storel_epi64(__m128i*,__m128i)
_mm_storel_pdSSE2intrin.hvoid _mm_storel_pd(double*,__m128d)
_mm_storel_piSSEintrin.hvoid _mm_storel_pi(__m64*,__m128)
_mm_storer_pdSSE2intrin.hvoid _mm_storer_pd(double*,__m128d)
_mm_storer_psSSEintrin.hvoid _mm_storer_ps(float*,__m128)
_mm_storeu_pdSSE2intrin.hvoid _mm_storeu_pd(double*,__m128d)
_mm_storeu_psSSEintrin.hvoid _mm_storeu_ps(float*,__m128)
_mm_storeu_si128SSE2intrin.hvoid _mm_storeu_si128(__m128i*,__m128i)
_mm_stream_load_si128SSE41intrin.h__m128i _mm_stream_load_si128(__m128i* )
_mm_stream_pdSSE2intrin.hvoid _mm_stream_pd(double*,__m128d)
_mm_stream_psSSEintrin.hvoid _mm_stream_ps(float*,__m128)
_mm_stream_sdSSE4aintrin.hvoid _mm_stream_sd(double*,__m128d)
_mm_stream_si128SSE2intrin.hvoid _mm_stream_si128(__m128i*,__m128i)
_mm_stream_si32SSE2intrin.hvoid _mm_stream_si32(int*,int)
_mm_stream_si64xSSE2intrin.hvoid _mm_stream_si64x(__int64 *,__int64)
_mm_stream_ssSSE4aintrin.hvoid _mm_stream_ss(float*,__m128)
_mm_sub_epi16SSE2intrin.h__m128i _mm_sub_epi16(__m128i,__m128i)
_mm_sub_epi32SSE2intrin.h__m128i _mm_sub_epi32(__m128i,__m128i)
_mm_sub_epi64SSE2intrin.h__m128i _mm_sub_epi64(__m128i,__m128i)
_mm_sub_epi8SSE2intrin.h__m128i _mm_sub_epi8(__m128i,__m128i)
_mm_sub_pdSSE2intrin.h__m128d _mm_sub_pd(__m128d,__m128d)
_mm_sub_psSSEintrin.h__m128 _mm_sub_ps(__m128,__m128)
_mm_sub_sdSSE2intrin.h__m128d _mm_sub_sd(__m128d,__m128d)
_mm_sub_ssSSEintrin.h__m128 _mm_sub_ss(__m128,__m128)
_mm_subs_epi16SSE2intrin.h__m128i _mm_subs_epi16(__m128i,__m128i)
_mm_subs_epi8SSE2intrin.h__m128i _mm_subs_epi8(__m128i,__m128i)
_mm_subs_epu16SSE2intrin.h__m128i _mm_subs_epu16(__m128i,__m128i)
_mm_subs_epu8SSE2intrin.h__m128i _mm_subs_epu8(__m128i,__m128i)
_mm_testc_pdAVX [2]immintrin.hint _mm_testc_pd(__m128d,__m128d)
_mm_testc_psAVX [2]immintrin.hint _mm_testc_ps(__m128,__m128)
_mm_testc_si128SSE41intrin.hint _mm_testc_si128(__m128i,__m128i )
_mm_testnzc_pdAVX [2]immintrin.hint _mm_testnzc_pd(__m128d,__m128d)
_mm_testnzc_psAVX [2]immintrin.hint _mm_testnzc_ps(__m128,__m128)
_mm_testnzc_si128SSE41intrin.hint _mm_testnzc_si128(__m128i,__m128i )
_mm_testz_pdAVX [2]immintrin.hint _mm_testz_pd(__m128d,__m128d)
_mm_testz_psAVX [2]immintrin.hint _mm_testz_ps(__m128,__m128)
_mm_testz_si128SSE41intrin.hint _mm_testz_si128(__m128i,__m128i )
_mm_ucomieq_sdSSE2intrin.hint _mm_ucomieq_sd(__m128d,__m128d)
_mm_ucomieq_ssSSEintrin.hint _mm_ucomieq_ss(__m128,__m128)
_mm_ucomige_sdSSE2intrin.hint _mm_ucomige_sd(__m128d,__m128d)
_mm_ucomige_ssSSEintrin.hint _mm_ucomige_ss(__m128,__m128)
_mm_ucomigt_sdSSE2intrin.hint _mm_ucomigt_sd(__m128d,__m128d)
_mm_ucomigt_ssSSEintrin.hint _mm_ucomigt_ss(__m128,__m128)
_mm_ucomile_sdSSE2intrin.hint _mm_ucomile_sd(__m128d,__m128d)
_mm_ucomile_ssSSEintrin.hint _mm_ucomile_ss(__m128,__m128)
_mm_ucomilt_sdSSE2intrin.hint _mm_ucomilt_sd(__m128d,__m128d)
_mm_ucomilt_ssSSEintrin.hint _mm_ucomilt_ss(__m128,__m128)
_mm_ucomineq_sdSSE2intrin.hint _mm_ucomineq_sd(__m128d,__m128d)
_mm_ucomineq_ssSSEintrin.hint _mm_ucomineq_ss(__m128,__m128)
_mm_unpackhi_epi16SSE2intrin.h__m128i _mm_unpackhi_epi16(__m128i,__m128i)
_mm_unpackhi_epi32SSE2intrin.h__m128i _mm_unpackhi_epi32(__m128i,__m128i)
_mm_unpackhi_epi64SSE2intrin.h__m128i _mm_unpackhi_epi64(__m128i,__m128i)
_mm_unpackhi_epi8SSE2intrin.h__m128i _mm_unpackhi_epi8(__m128i,__m128i)
_mm_unpackhi_pdSSE2intrin.h__m128d _mm_unpackhi_pd(__m128d,__m128d)
_mm_unpackhi_psSSEintrin.h__m128 _mm_unpackhi_ps(__m128,__m128)
_mm_unpacklo_epi16SSE2intrin.h__m128i _mm_unpacklo_epi16(__m128i,__m128i)
_mm_unpacklo_epi32SSE2intrin.h__m128i _mm_unpacklo_epi32(__m128i,__m128i)
_mm_unpacklo_epi64SSE2intrin.h__m128i _mm_unpacklo_epi64(__m128i,__m128i)
_mm_unpacklo_epi8SSE2intrin.h__m128i _mm_unpacklo_epi8(__m128i,__m128i)
_mm_unpacklo_pdSSE2intrin.h__m128d _mm_unpacklo_pd(__m128d,__m128d)
_mm_unpacklo_psSSEintrin.h__m128 _mm_unpacklo_ps(__m128,__m128)
_mm_xor_pdSSE2intrin.h__m128d _mm_xor_pd(__m128d,__m128d)
_mm_xor_psSSEintrin.h__m128 _mm_xor_ps(__m128,__m128)
_mm_xor_si128SSE2intrin.h__m128i _mm_xor_si128(__m128i,__m128i)
_mm256_abs_epi16AVX2 [2]immintrin.h__m256i _mm256_abs_epi16(__m256i)
_mm256_abs_epi32AVX2 [2]immintrin.h__m256i _mm256_abs_epi32(__m256i)
_mm256_abs_epi8AVX2 [2]immintrin.h__m256i _mm256_abs_epi8(__m256i)
_mm256_add_epi16AVX2 [2]immintrin.h__m256i _mm256_add_epi16(__m256i,__m256i)
_mm256_add_epi32AVX2 [2]immintrin.h__m256i _mm256_add_epi32(__m256i,__m256i)
_mm256_add_epi64AVX2 [2]immintrin.h__m256i _mm256_add_epi64(__m256i,__m256i)
_mm256_add_epi8AVX2 [2]immintrin.h__m256i _mm256_add_epi8(__m256i,__m256i)
_mm256_add_pdAVX [2]immintrin.h__m256d _mm256_add_pd(__m256d,__m256d)
_mm256_add_psAVX [2]immintrin.h__m256 _mm256_add_ps(__m256,__m256)
_mm256_adds_epi16AVX2 [2]immintrin.h__m256i _mm256_adds_epi16(__m256i,__m256i)
_mm256_adds_epi8AVX2 [2]immintrin.h__m256i _mm256_adds_epi8(__m256i,__m256i)
_mm256_adds_epu16AVX2 [2]immintrin.h__m256i _mm256_adds_epu16(__m256i,__m256i)
_mm256_adds_epu8AVX2 [2]immintrin.h__m256i _mm256_adds_epu8(__m256i,__m256i)
_mm256_addsub_pdAVX [2]immintrin.h__m256d _mm256_addsub_pd(__m256d,__m256d)
_mm256_addsub_psAVX [2]immintrin.h__m256 _mm256_addsub_ps(__m256,__m256)
_mm256_alignr_epi8AVX2 [2]immintrin.h__m256i _mm256_alignr_epi8(__m256i,__m256i,const int)
_mm256_and_pdAVX [2]immintrin.h__m256d _mm256_and_pd(__m256d,__m256d)
_mm256_and_psAVX [2]immintrin.h__m256 _mm256_and_ps(__m256,__m256)
_mm256_and_si256AVX2 [2]immintrin.h__m256i _mm256_and_si256(__m256i,__m256i)
_mm256_andnot_pdAVX [2]immintrin.h__m256d _mm256_andnot_pd(__m256d,__m256d)
_mm256_andnot_psAVX [2]immintrin.h__m256 _mm256_andnot_ps(__m256,__m256)
_mm256_andnot_si256AVX2 [2]immintrin.h__m256i _mm256_andnot_si256(__m256i,__m256i)
_mm256_avg_epu16AVX2 [2]immintrin.h__m256i _mm256_avg_epu16(__m256i,__m256i)
_mm256_avg_epu8AVX2 [2]immintrin.h__m256i _mm256_avg_epu8(__m256i,__m256i)
_mm256_blend_epi16AVX2 [2]immintrin.h__m256i _mm256_blend_epi16(__m256i,__m256i,const int)
_mm256_blend_epi32AVX2 [2]immintrin.h__m256i _mm256_blend_epi32(__m256i,__m256i,const int)
_mm256_blend_pdAVX [2]immintrin.h__m256d _mm256_blend_pd(__m256d,__m256d,const int)
_mm256_blend_psAVX [2]immintrin.h__m256 _mm256_blend_ps(__m256,__m256,const int)
_mm256_blendv_epi8AVX2 [2]immintrin.h__m256i _mm256_blendv_epi8(__m256i,__m256i,__m256i)
_mm256_blendv_pdAVX [2]immintrin.h__m256d _mm256_blendv_pd(__m256d,__m256d,__m256d)
_mm256_blendv_psAVX [2]immintrin.h__m256 _mm256_blendv_ps(__m256,__m256,__m256)
_mm256_broadcast_pdAVX [2]immintrin.h__m256d _mm256_broadcast_pd(__m128d const *)
_mm256_broadcast_psAVX [2]immintrin.h__m256 _mm256_broadcast_ps(__m128 const *)
_mm256_broadcast_sdAVX [2]immintrin.h__m256d _mm256_broadcast_sd(double const *)
_mm256_broadcast_ssAVX [2]immintrin.h__m256 _mm256_broadcast_ss(float const *)
_mm256_broadcastb_epi8AVX2 [2]immintrin.h__m256i _mm256_broadcastb_epi8 (__m128i)
_mm256_broadcastd_epi32AVX2 [2]immintrin.h__m256i _mm256_broadcastd_epi32(__m128i)
_mm256_broadcastq_epi64AVX2 [2]immintrin.h__m256i _mm256_broadcastq_epi64(__m128i)
_mm256_broadcastsd_pdAVX2 [2]immintrin.h__m256d _mm256_broadcastsd_pd(__m128d)
_mm256_broadcastsi128_si256AVX2 [2]immintrin.h__m256i _mm256_broadcastsi128_si256(__m128i)
_mm256_broadcastss_psAVX2 [2]immintrin.h__m256 _mm256_broadcastss_ps(__m128)
_mm256_broadcastw_epi16AVX2 [2]immintrin.h__m256i _mm256_broadcastw_epi16(__m128i)
_mm256_castpd_psAVX [2]immintrin.h__m256 _mm256_castpd_ps(__m256d)
_mm256_castpd_si256AVX [2]immintrin.h__m256i _mm256_castpd_si256(__m256d)
_mm256_castpd128_pd256AVX [2]immintrin.h__m256d _mm256_castpd128_pd256(__m128d)
_mm256_castpd256_pd128AVX [2]immintrin.h__m128d _mm256_castpd256_pd128(__m256d)
_mm256_castps_pdAVX [2]immintrin.h__m256d _mm256_castps_pd(__m256)
_mm256_castps_si256AVX [2]immintrin.h__m256i _mm256_castps_si256(__m256)
_mm256_castps128_ps256AVX [2]immintrin.h__m256 _mm256_castps128_ps256(__m128)
_mm256_castps256_ps128AVX [2]immintrin.h__m128 _mm256_castps256_ps128(__m256)
_mm256_castsi128_si256AVX [2]immintrin.h__m256i _mm256_castsi128_si256(__m128i)
_mm256_castsi256_pdAVX [2]immintrin.h__m256d _mm256_castsi256_pd(__m256i)
_mm256_castsi256_psAVX [2]immintrin.h__m256 _mm256_castsi256_ps(__m256i)
_mm256_castsi256_si128AVX [2]immintrin.h__m128i _mm256_castsi256_si128(__m256i)
_mm256_cmov_si256XOP [1]ammintrin.h__m256i _mm256_cmov_si256(__m256i,__m256i,__m256i)
_mm256_cmp_pdAVX [2]immintrin.h__m256d _mm256_cmp_pd(__m256d,__m256d,const int)
_mm256_cmp_psAVX [2]immintrin.h__m256 _mm256_cmp_ps(__m256,__m256,const int)
_mm256_cmpeq_epi16AVX2 [2]immintrin.h__m256i _mm256_cmpeq_epi16(__m256i,__m256i)
_mm256_cmpeq_epi32AVX2 [2]immintrin.h__m256i _mm256_cmpeq_epi32(__m256i,__m256i)
_mm256_cmpeq_epi64AVX2 [2]immintrin.h__m256i _mm256_cmpeq_epi64(__m256i,__m256i)
_mm256_cmpeq_epi8AVX2 [2]immintrin.h__m256i _mm256_cmpeq_epi8(__m256i,__m256i)
_mm256_cmpgt_epi16AVX2 [2]immintrin.h__m256i _mm256_cmpgt_epi16(__m256i,__m256i)
_mm256_cmpgt_epi32AVX2 [2]immintrin.h__m256i _mm256_cmpgt_epi32(__m256i,__m256i)
_mm256_cmpgt_epi64AVX2 [2]immintrin.h__m256i _mm256_cmpgt_epi64(__m256i,__m256i)
_mm256_cmpgt_epi8AVX2 [2]immintrin.h__m256i _mm256_cmpgt_epi8(__m256i,__m256i)
_mm256_cvtepi16_epi32AVX2 [2]immintrin.h__m256i _mm256_cvtepi16_epi32(__m128i)
_mm256_cvtepi16_epi64AVX2 [2]immintrin.h__m256i _mm256_cvtepi16_epi64(__m128i)
_mm256_cvtepi32_epi64AVX2 [2]immintrin.h__m256i _mm256_cvtepi32_epi64(__m128i)
_mm256_cvtepi32_pdAVX [2]immintrin.h__m256d _mm256_cvtepi32_pd(__m128i)
_mm256_cvtepi32_psAVX [2]immintrin.h__m256 _mm256_cvtepi32_ps(__m256i)
_mm256_cvtepi8_epi16AVX2 [2]immintrin.h__m256i _mm256_cvtepi8_epi16(__m128i)
_mm256_cvtepi8_epi32AVX2 [2]immintrin.h__m256i _mm256_cvtepi8_epi32(__m128i)
_mm256_cvtepi8_epi64AVX2 [2]immintrin.h__m256i _mm256_cvtepi8_epi64(__m128i)
_mm256_cvtepu16_epi32AVX2 [2]immintrin.h__m256i _mm256_cvtepu16_epi32(__m128i)
_mm256_cvtepu16_epi64AVX2 [2]immintrin.h__m256i _mm256_cvtepu16_epi64(__m128i)
_mm256_cvtepu32_epi64AVX2 [2]immintrin.h__m256i _mm256_cvtepu32_epi64(__m128i)
_mm256_cvtepu8_epi16AVX2 [2]immintrin.h__m256i _mm256_cvtepu8_epi16(__m128i)
_mm256_cvtepu8_epi32AVX2 [2]immintrin.h__m256i _mm256_cvtepu8_epi32(__m128i)
_mm256_cvtepu8_epi64AVX2 [2]immintrin.h__m256i _mm256_cvtepu8_epi64(__m128i)
_mm256_cvtpd_epi32AVX [2]immintrin.h__m128i _mm256_cvtpd_epi32(__m256d)
_mm256_cvtpd_psAVX [2]immintrin.h__m128 _mm256_cvtpd_ps(__m256d)
_mm256_cvtph_psF16C [2]immintrin.h__m256 _mm256_cvtph_ps(__m128i)
_mm256_cvtps_epi32AVX [2]immintrin.h__m256i _mm256_cvtps_epi32(__m256)
_mm256_cvtps_pdAVX [2]immintrin.h__m256d _mm256_cvtps_pd(__m128)
_mm256_cvtps_phF16C [2]immintrin.h__m128i _mm256_cvtps_ph(__m256,const int)
_mm256_cvttpd_epi32AVX [2]immintrin.h__m128i _mm256_cvttpd_epi32(__m256d)
_mm256_cvttps_epi32AVX [2]immintrin.h__m256i _mm256_cvttps_epi32(__m256)
_mm256_div_pdAVX [2]immintrin.h__m256d _mm256_div_pd(__m256d,__m256d)
_mm256_div_psAVX [2]immintrin.h__m256 _mm256_div_ps(__m256,__m256)
_mm256_dp_psAVX [2]immintrin.h__m256 _mm256_dp_ps(__m256,__m256,const int)
_mm256_extractf128_pdAVX [2]immintrin.h__m128d _mm256_extractf128_pd(__m256d,const int)
_mm256_extractf128_psAVX [2]immintrin.h__m128 _mm256_extractf128_ps(__m256,const int)
_mm256_extractf128_si256AVX [2]immintrin.h__m128i _mm256_extractf128_si256(__m256i,const int)
_mm256_extracti128_si256AVX2 [2]immintrin.h__m128i _mm256_extracti128_si256(__m256i a,int offset)
_mm256_fmadd_pdFMA [2]immintrin.h__m256d _mm256_fmadd_pd (__m256d a,__m256d b,__m256d c)
_mm256_fmadd_psFMA [2]immintrin.h__m256 _mm256_fmadd_ps (__m256 a,__m256 b,__m256 c)
_mm256_fmaddsub_pdFMA [2]immintrin.h__m256d _mm256_fmaddsub_pd (__m256d a,__m256d b,__m256d c)
_mm256_fmaddsub_psFMA [2]immintrin.h__m256 _mm256_fmaddsub_ps (__m256 a,__m256 b,__m256 c)
_mm256_fmsub_pdFMA [2]immintrin.h__m256d _mm256_fmsub_pd (__m256d a,__m256d b,__m256d c)
_mm256_fmsub_psFMA [2]immintrin.h__m256 _mm256_fmsub_ps (__m256 a,__m256 b,__m256 c)
_mm256_fmsubadd_pdFMA [2]immintrin.h__m256d _mm256_fmsubadd_pd (__m256d a,__m256d b,__m256d c)
_mm256_fmsubadd_psFMA [2]immintrin.h__m256 _mm256_fmsubadd_ps (__m256 a,__m256 b,__m256 c)
_mm256_fnmadd_pdFMA [2]immintrin.h__m256d _mm256_fnmadd_pd (__m256d a,__m256d b,__m256d c)
_mm256_fnmadd_psFMA [2]immintrin.h__m256 _mm256_fnmadd_ps (__m256 a,__m256 b,__m256 c)
_mm256_fnmsub_pdFMA [2]immintrin.h__m256d _mm256_fnmsub_pd (__m256d a,__m256d b,__m256d c)
_mm256_fnmsub_psFMA [2]immintrin.h__m256 _mm256_fnmsub_ps (__m256 a,__m256 b,__m256 c)
_mm256_frcz_pdXOP [1]ammintrin.h__m256d _mm256_frcz_pd(__m256d)
_mm256_frcz_psXOP [1]ammintrin.h__m256 _mm256_frcz_ps(__m256)
_mm256_hadd_epi16AVX2 [2]immintrin.h__m256i _mm256_hadd_epi16(__m256i,__m256i)
_mm256_hadd_epi32AVX2 [2]immintrin.h__m256i _mm256_hadd_epi32(__m256i,__m256i)
_mm256_hadd_pdAVX [2]immintrin.h__m256d _mm256_hadd_pd(__m256d,__m256d)
_mm256_hadd_psAVX [2]immintrin.h__m256 _mm256_hadd_ps(__m256,__m256)
_mm256_hadds_epi16AVX2 [2]immintrin.h__m256i _mm256_hadds_epi16(__m256i,__m256i)
_mm256_hsub_epi16AVX2 [2]immintrin.h__m256i _mm256_hsub_epi16(__m256i,__m256i)
_mm256_hsub_epi32AVX2 [2]immintrin.h__m256i _mm256_hsub_epi32(__m256i,__m256i)
_mm256_hsub_pdAVX [2]immintrin.h__m256d _mm256_hsub_pd(__m256d,__m256d)
_mm256_hsub_psAVX [2]immintrin.h__m256 _mm256_hsub_ps(__m256,__m256)
_mm256_hsubs_epi16AVX2 [2]immintrin.h__m256i _mm256_hsubs_epi16(__m256i,__m256i)
_mm256_i32gather_epi32AVX2 [2]immintrin.h__m256i _mm256_i32gather_epi32(int const *base,__m256i index,const int scale)
_mm256_i32gather_epi64AVX2 [2]immintrin.h__m256i _mm256_i32gather_epi64(__int64 const *base,__m128i index,const int scale)
_mm256_i32gather_pdAVX2 [2]immintrin.h__m256d _mm256_i32gather_pd(double const *base,__m128i index,const int scale)
_mm256_i32gather_psAVX2 [2]immintrin.h__m256 _mm256_i32gather_ps(float const *base,__m256i index,const int scale)
_mm256_i64gather_epi32AVX2 [2]immintrin.h__m256i _mm256_i64gather_epi32(int const *base,__m256i index,const int scale)
_mm256_i64gather_epi64AVX2 [2]immintrin.h__m256i _mm256_i64gather_epi64(__int64 const *base,__m256i index,const int scale)
_mm256_i64gather_pdAVX2 [2]immintrin.h__m256d _mm256_i64gather_pd(double const *base,__m256i index,const int scale)
_mm256_i64gather_psAVX2 [2]immintrin.h__m128 _mm256_i64gather_ps(float const *base,__m256i index,const int scale)
_mm256_insertf128_pdAVX [2]immintrin.h__m256d _mm256_insertf128_pd(__m256d,__m128d,int )
_mm256_insertf128_psAVX [2]immintrin.h__m256 _mm256_insertf128_ps(__m256,__m128,int )
_mm256_insertf128_si256AVX [2]immintrin.h__m256i _mm256_insertf128_si256(__m256i,__m128i,int )
_mm256_inserti128_si256AVX2 [2]immintrin.h__m256i _mm256_inserti128_si256(__m256i,__m128i,int)
_mm256_lddqu_si256AVX [2]immintrin.h__m256i _mm256_lddqu_si256(__m256i *)
_mm256_load_pdAVX [2]immintrin.h__m256d _mm256_load_pd(double const *)
_mm256_load_psAVX [2]immintrin.h__m256 _mm256_load_ps(float const *)
_mm256_load_si256AVX [2]immintrin.h__m256i _mm256_load_si256(__m256i *)
_mm256_loadu_pdAVX [2]immintrin.h__m256d _mm256_loadu_pd(double const *)
_mm256_loadu_psAVX [2]immintrin.h__m256 _mm256_loadu_ps(float const *)
_mm256_loadu_si256AVX [2]immintrin.h__m256i _mm256_loadu_si256(__m256i *)
_mm256_macc_pdFMA4 [1]ammintrin.h__m256d _mm256_macc_pd(__m256d,__m256d,__m256d)
_mm256_macc_psFMA4 [1]ammintrin.h__m256 _mm256_macc_ps(__m256,__m256,__m256)
_mm256_madd_epi16AVX2 [2]immintrin.h__m256i _mm256_madd_epi16(__m256i,__m256i)
_mm256_maddsub_pdFMA4 [1]ammintrin.h__m256d _mm256_maddsub_pd(__m256d,__m256d,__m256d)
_mm256_maddsub_psFMA4 [1]ammintrin.h__m256 _mm256_maddsub_ps(__m256,__m256,__m256)
_mm256_maddubs_epi16AVX2 [2]immintrin.h__m256i _mm256_maddubs_epi16(__m256i,__m256i)
_mm256_mask_i32gather_epi32AVX2 [2]immintrin.h__m256i _mm256_mask_i32gather_epi32(__m256i src,int const *base,__m256i index,__m256i mask,const int scale)
_mm256_mask_i32gather_epi64AVX2 [2]immintrin.h__m256i _mm256_mask_i32gather_epi64(__m256i src,__int64 const *base,__m128i index,__m256i mask,const int scale)
_mm256_mask_i32gather_pdAVX2 [2]immintrin.h__m256d _mm256_mask_i32gather_pd(__m256d src,double const *base,__m128i index,__m256d mask,const int scale)
_mm256_mask_i32gather_psAVX2 [2]immintrin.h__m256 _mm256_mask_i32gather_ps(__m256 src,float const *base,__m256i index,__m256 mask,const int scale)
_mm256_mask_i64gather_epi32AVX2 [2]immintrin.h__m128i _mm256_mask_i64gather_epi32(__m128i src,int const *base,__m256i index,__m128i mask,const int scale)
_mm256_mask_i64gather_epi64AVX2 [2]immintrin.h__m256i _mm256_mask_i64gather_epi64(__m256i src,__int64 const *base,__m256i index,__m256i mask,const int scale)
_mm256_mask_i64gather_pdAVX2 [2]immintrin.h__m256d _mm256_mask_i64gather_pd(__m256d src,double const *base,__m256i index,__m256d mask,const int scale)
_mm256_mask_i64gather_psAVX2 [2]immintrin.h__m128 _mm256_mask_i64gather_ps(__m128 src,float const *base,__m256i index,__m128 mask,const int scale)
_mm256_maskload_epi32AVX2 [2]immintrin.h__m256i _mm256_maskload_epi32(int const *,__m256i)
_mm256_maskload_epi64AVX2 [2]immintrin.h__m256i _mm256_maskload_epi64( __int64 const *,__m256i)
_mm256_maskload_pdAVX [2]immintrin.h__m256d _mm256_maskload_pd(double const *,__m256i)
_mm256_maskload_psAVX [2]immintrin.h__m256 _mm256_maskload_ps(float const *,__m256i)
_mm256_maskstore_epi32AVX2 [2]immintrin.hvoid _mm256_maskstore_epi32(int *,__m256i,__m256i)
_mm256_maskstore_epi64AVX2 [2]immintrin.hvoid _mm256_maskstore_epi64(__int64 *,__m256i,__m256i)
_mm256_maskstore_pdAVX [2]immintrin.hvoid _mm256_maskstore_pd(double *,__m256i,__m256d)
_mm256_maskstore_psAVX [2]immintrin.hvoid _mm256_maskstore_ps(float *,__m256i,__m256)
_mm256_max_epi16AVX2 [2]immintrin.h__m256i _mm256_max_epi16(__m256i,__m256i)
_mm256_max_epi32AVX2 [2]immintrin.h__m256i _mm256_max_epi32(__m256i,__m256i)
_mm256_max_epi8AVX2 [2]immintrin.h__m256i _mm256_max_epi8(__m256i,__m256i)
_mm256_max_epu16AVX2 [2]immintrin.h__m256i _mm256_max_epu16(__m256i,__m256i)
_mm256_max_epu32AVX2 [2]immintrin.h__m256i _mm256_max_epu32(__m256i,__m256i)
_mm256_max_epu8AVX2 [2]immintrin.h__m256i _mm256_max_epu8(__m256i,__m256i)
_mm256_max_pdAVX [2]immintrin.h__m256d _mm256_max_pd(__m256d,__m256d)
_mm256_max_psAVX [2]immintrin.h__m256 _mm256_max_ps(__m256,__m256)
_mm256_min_epi16AVX2 [2]immintrin.h__m256i _mm256_min_epi16(__m256i,__m256i)
_mm256_min_epi32AVX2 [2]immintrin.h__m256i _mm256_min_epi32(__m256i,__m256i)
_mm256_min_epi8AVX2 [2]immintrin.h__m256i _mm256_min_epi8(__m256i,__m256i)
_mm256_min_epu16AVX2 [2]immintrin.h__m256i _mm256_min_epu16(__m256i,__m256i)
_mm256_min_epu32AVX2 [2]immintrin.h__m256i _mm256_min_epu32(__m256i,__m256i)
_mm256_min_epu8AVX2 [2]immintrin.h__m256i _mm256_min_epu8(__m256i,__m256i)
_mm256_min_pdAVX [2]immintrin.h__m256d _mm256_min_pd(__m256d,__m256d)
_mm256_min_psAVX [2]immintrin.h__m256 _mm256_min_ps(__m256,__m256)
_mm256_movedup_pdAVX [2]immintrin.h__m256d _mm256_movedup_pd(__m256d)
_mm256_movehdup_psAVX [2]immintrin.h__m256 _mm256_movehdup_ps(__m256)
_mm256_moveldup_psAVX [2]immintrin.h__m256 _mm256_moveldup_ps(__m256)
_mm256_movemask_epi8AVX2 [2]immintrin.hint _mm256_movemask_epi8(__m256i)
_mm256_movemask_pdAVX [2]immintrin.hint _mm256_movemask_pd(__m256d)
_mm256_movemask_psAVX [2]immintrin.hint _mm256_movemask_ps(__m256)
_mm256_mpsadbw_epu8AVX2 [2]immintrin.h__m256i _mm256_mpsadbw_epu8(__m256i,__m256i,const int)
_mm256_msub_pdFMA4 [1]ammintrin.h__m256d _mm256_msub_pd(__m256d,__m256d,__m256d)
_mm256_msub_psFMA4 [1]ammintrin.h__m256 _mm256_msub_ps(__m256,__m256,__m256)
_mm256_msubadd_pdFMA4 [1]ammintrin.h__m256d _mm256_msubadd_pd(__m256d,__m256d,__m256d)
_mm256_msubadd_psFMA4 [1]ammintrin.h__m256 _mm256_msubadd_ps(__m256,__m256,__m256)
_mm256_mul_epi32AVX2 [2]immintrin.h__m256i _mm256_mul_epi32(__m256i,__m256i)
_mm256_mul_epu32AVX2 [2]immintrin.h__m256i _mm256_mul_epu32(__m256i,__m256i)
_mm256_mul_pdAVX [2]immintrin.h__m256d _mm256_mul_pd(__m256d,__m256d)
_mm256_mul_psAVX [2]immintrin.h__m256 _mm256_mul_ps(__m256,__m256)
_mm256_mulhi_epi16AVX2 [2]immintrin.h__m256i _mm256_mulhi_epi16(__m256i,__m256i)
_mm256_mulhi_epu16AVX2 [2]immintrin.h__m256i _mm256_mulhi_epu16(__m256i,__m256i)
_mm256_mulhrs_epi16AVX2 [2]immintrin.h__m256i _mm256_mulhrs_epi16(__m256i,__m256i)
_mm256_mullo_epi16AVX2 [2]immintrin.h__m256i _mm256_mullo_epi16(__m256i,__m256i)
_mm256_mullo_epi32AVX2 [2]immintrin.h__m256i _mm256_mullo_epi32(__m256i,__m256i)
_mm256_nmacc_pdFMA4 [1]ammintrin.h__m256d _mm256_nmacc_pd(__m256d,__m256d,__m256d)
_mm256_nmacc_psFMA4 [1]ammintrin.h__m256 _mm256_nmacc_ps(__m256,__m256,__m256)
_mm256_nmsub_pdFMA4 [1]ammintrin.h__m256d _mm256_nmsub_pd(__m256d,__m256d,__m256d)
_mm256_nmsub_psFMA4 [1]ammintrin.h__m256 _mm256_nmsub_ps(__m256,__m256,__m256)
_mm256_or_pdAVX [2]immintrin.h__m256d _mm256_or_pd(__m256d,__m256d)
_mm256_or_psAVX [2]immintrin.h__m256 _mm256_or_ps(__m256,__m256)
_mm256_or_si256AVX2 [2]immintrin.h__m256i _mm256_or_si256(__m256i,__m256i)
_mm256_packs_epi16AVX2 [2]immintrin.h__m256i _mm256_packs_epi16(__m256i,__m256i)
_mm256_packs_epi32AVX2 [2]immintrin.h__m256i _mm256_packs_epi32(__m256i,__m256i)
_mm256_packus_epi16AVX2 [2]immintrin.h__m256i _mm256_packus_epi16(__m256i,__m256i)
_mm256_packus_epi32AVX2 [2]immintrin.h__m256i _mm256_packus_epi32(__m256i,__m256i)
_mm256_permute_pdAVX [2]immintrin.h__m256d _mm256_permute_pd(__m256d,int)
_mm256_permute_psAVX [2]immintrin.h__m256 _mm256_permute_ps(__m256,int)
_mm256_permute2_pdXOP [1]ammintrin.h__m256d _mm256_permute2_pd(__m256d,__m256d,__m256i,int)
_mm256_permute2_psXOP [1]ammintrin.h__m256 _mm256_permute2_ps(__m256,__m256,__m256i,int)
_mm256_permute2f128_pdAVX [2]immintrin.h__m256d _mm256_permute2f128_pd(__m256d,__m256d,int)
_mm256_permute2f128_psAVX [2]immintrin.h__m256 _mm256_permute2f128_ps(__m256,__m256,int)
_mm256_permute2f128_si256AVX [2]immintrin.h__m256i _mm256_permute2f128_si256(__m256i,__m256i,int)
_mm256_permute2x128_si256AVX2 [2]immintrin.h__m256i _mm256_permute2x128_si256(__m256i,__m256i,const int)
_mm256_permute4x64_epi64AVX2 [2]immintrin.h__m256i _mm256_permute4x64_epi64 (__m256i,const int)
_mm256_permute4x64_pdAVX2 [2]immintrin.h__m256d _mm256_permute4x64_pd(__m256d,const int)
_mm256_permutevar_pdAVX [2]immintrin.h__m256d _mm256_permutevar_pd(__m256d,__m256i)
_mm256_permutevar_psAVX [2]immintrin.h__m256 _mm256_permutevar_ps(__m256,__m256i)
_mm256_permutevar8x32_epi32AVX2 [2]immintrin.h__m256i _mm256_permutevar8x32_epi32(__m256i,__m256i)
_mm256_permutevar8x32_psAVX2 [2]immintrin.h__m256 _mm256_permutevar8x32_ps (__m256,__m256i)
_mm256_rcp_psAVX [2]immintrin.h__m256 _mm256_rcp_ps(__m256)
_mm256_round_pdAVX [2]immintrin.h__m256d _mm256_round_pd(__m256d,int)
_mm256_round_psAVX [2]immintrin.h__m256 _mm256_round_ps(__m256,int)
_mm256_rsqrt_psAVX [2]immintrin.h__m256 _mm256_rsqrt_ps(__m256)
_mm256_sad_epu8AVX2 [2]immintrin.h__m256i _mm256_sad_epu8(__m256i,__m256i)
_mm256_set_epi16AVX [2]immintrin.h__m256i _mm256_set_epi16(short,short,short,short,short,short,short,short,short,short,short,short,short,short,short,short)
_mm256_set_epi32AVX [2]immintrin.h__m256i _mm256_set_epi32(int,int,int,int,int,int,int,int)
_mm256_set_epi64xAVX [2]immintrin.h__m256i _mm256_set_epi64x(long long,long long,long long,long long)
_mm256_set_epi8AVX [2]immintrin.h__m256i _mm256_set_epi8(char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char)
_mm256_set_pdAVX [2]immintrin.h__m256d _mm256_set_pd(double,double,double,double)
_mm256_set_psAVX [2]immintrin.h__m256 _mm256_set_ps(float,float,float,float,float,float,float,float)
_mm256_set1_epi16AVX [2]immintrin.h__m256i _mm256_set1_epi16(short)
_mm256_set1_epi32AVX [2]immintrin.h__m256i _mm256_set1_epi32(int)
_mm256_set1_epi64xAVX [2]immintrin.h__m256i _mm256_set1_epi64x(long long)
_mm256_set1_epi8AVX [2]immintrin.h__m256i _mm256_set1_epi8(char)
_mm256_set1_pdAVX [2]immintrin.h__m256d _mm256_set1_pd(double)
_mm256_set1_psAVX [2]immintrin.h__m256 _mm256_set1_ps(float)
_mm256_setr_epi16AVX [2]immintrin.h__m256i _mm256_setr_epi16(short,short,short,short,short,short,short,short,short,short,short,short,short,short,short,short)
_mm256_setr_epi32AVX [2]immintrin.h__m256i _mm256_setr_epi32(int,int,int,int,int,int,int,int)
_mm256_setr_epi64xAVX [2]immintrin.h__m256i _mm256_setr_epi64x(long long,long long,long long,long long)
_mm256_setr_epi8AVX [2]immintrin.h__m256i _mm256_setr_epi8(char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char,char)
_mm256_setr_pdAVX [2]immintrin.h__m256d _mm256_setr_pd(double,double,double,double)
_mm256_setr_psAVX [2]immintrin.h__m256 _mm256_setr_ps(float,float,float,float,float,float,float,float)
_mm256_setzero_pdAVX [2]immintrin.h__m256d _mm256_setzero_pd(void)
_mm256_setzero_psAVX [2]immintrin.h__m256 _mm256_setzero_ps(void)
_mm256_setzero_si256AVX [2]immintrin.h__m256i _mm256_setzero_si256(void)
_mm256_shuffle_epi32AVX2 [2]immintrin.h__m256i _mm256_shuffle_epi32(__m256i,const int)
_mm256_shuffle_epi8AVX2 [2]immintrin.h__m256i _mm256_shuffle_epi8(__m256i,__m256i)
_mm256_shuffle_pdAVX [2]immintrin.h__m256d _mm256_shuffle_pd(__m256d,__m256d,const int)
_mm256_shuffle_psAVX [2]immintrin.h__m256 _mm256_shuffle_ps(__m256,__m256,const int)
_mm256_shufflehi_epi16AVX2 [2]immintrin.h__m256i _mm256_shufflehi_epi16(__m256i,const int)
_mm256_shufflelo_epi16AVX2 [2]immintrin.h__m256i _mm256_shufflelo_epi16(__m256i,const int)
_mm256_sign_epi16AVX2 [2]immintrin.h__m256i _mm256_sign_epi16(__m256i,__m256i)
_mm256_sign_epi32AVX2 [2]immintrin.h__m256i _mm256_sign_epi32(__m256i,__m256i)
_mm256_sign_epi8AVX2 [2]immintrin.h__m256i _mm256_sign_epi8(__m256i,__m256i)
_mm256_sll_epi16AVX2 [2]immintrin.h__m256i _mm256_sll_epi16(__m256i,__m128i)
_mm256_sll_epi32AVX2 [2]immintrin.h__m256i _mm256_sll_epi32(__m256i,__m128i)
_mm256_sll_epi64AVX2 [2]immintrin.h__m256i _mm256_sll_epi64(__m256i,__m128i)
_mm256_slli_epi16AVX2 [2]immintrin.h__m256i _mm256_slli_epi16(__m256i,int)
_mm256_slli_epi32AVX2 [2]immintrin.h__m256i _mm256_slli_epi32(__m256i,int)
_mm256_slli_epi64AVX2 [2]immintrin.h__m256i _mm256_slli_epi64(__m256i,int)
_mm256_slli_si256AVX2 [2]immintrin.h__m256i _mm256_slli_si256(__m256i,int)
_mm256_sllv_epi32AVX2 [2]immintrin.h__m256i _mm256_sllv_epi32(__m256i,__m256i)
_mm256_sllv_epi64AVX2 [2]immintrin.h__m256i _mm256_sllv_epi64(__m256i,__m256i)
_mm256_sqrt_pdAVX [2]immintrin.h__m256d _mm256_sqrt_pd(__m256d)
_mm256_sqrt_psAVX [2]immintrin.h__m256 _mm256_sqrt_ps(__m256)
_mm256_sra_epi16AVX2 [2]immintrin.h__m256i _mm256_sra_epi16(__m256i,__m128i)
_mm256_sra_epi32AVX2 [2]immintrin.h__m256i _mm256_sra_epi32(__m256i,__m128i)
_mm256_srai_epi16AVX2 [2]immintrin.h__m256i _mm256_srai_epi16(__m256i,int)
_mm256_srai_epi32AVX2 [2]immintrin.h__m256i _mm256_srai_epi32(__m256i,int)
_mm256_srav_epi32AVX2 [2]immintrin.h__m256i _mm256_srav_epi32(__m256i,__m256i)
_mm256_srl_epi16AVX2 [2]immintrin.h__m256i _mm256_srl_epi16(__m256i,__m128i)
_mm256_srl_epi32AVX2 [2]immintrin.h__m256i _mm256_srl_epi32(__m256i,__m128i)
_mm256_srl_epi64AVX2 [2]immintrin.h__m256i _mm256_srl_epi64(__m256i,__m128i)
_mm256_srli_epi16AVX2 [2]immintrin.h__m256i _mm256_srli_epi16(__m256i,int)
_mm256_srli_epi32AVX2 [2]immintrin.h__m256i _mm256_srli_epi32(__m256i,int)
_mm256_srli_epi64AVX2 [2]immintrin.h__m256i _mm256_srli_epi64(__m256i,int)
_mm256_srli_si256AVX2 [2]immintrin.h__m256i _mm256_srli_si256(__m256i,int)
_mm256_srlv_epi32AVX2 [2]immintrin.h__m256i _mm256_srlv_epi32(__m256i,__m256i)
_mm256_srlv_epi64AVX2 [2]immintrin.h__m256i _mm256_srlv_epi64(__m256i,__m256i)
_mm256_store_pdAVX [2]immintrin.hvoid _mm256_store_pd(double *,__m256d)
_mm256_store_psAVX [2]immintrin.hvoid _mm256_store_ps(float *,__m256)
_mm256_store_si256AVX [2]immintrin.hvoid _mm256_store_si256(__m256i *,__m256i)
_mm256_storeu_pdAVX [2]immintrin.hvoid _mm256_storeu_pd(double *,__m256d)
_mm256_storeu_psAVX [2]immintrin.hvoid _mm256_storeu_ps(float *,__m256)
_mm256_storeu_si256AVX [2]immintrin.hvoid _mm256_storeu_si256(__m256i *,__m256i)
_mm256_stream_load_si256AVX2 [2]immintrin.h__m256i _mm256_stream_load_si256(__m256i const *)
_mm256_stream_pdAVX [2]immintrin.hvoid _mm256_stream_pd(double *,__m256d)
_mm256_stream_psAVX [2]immintrin.hvoid _mm256_stream_ps(float *p,__m256 a)
_mm256_stream_si256AVX [2]immintrin.hvoid _mm256_stream_si256(__m256i *,__m256i)
_mm256_sub_epi16AVX2 [2]immintrin.h__m256i _mm256_sub_epi16(__m256i,__m256i)
_mm256_sub_epi32AVX2 [2]immintrin.h__m256i _mm256_sub_epi32(__m256i,__m256i)
_mm256_sub_epi64AVX2 [2]immintrin.h__m256i _mm256_sub_epi64(__m256i,__m256i)
_mm256_sub_epi8AVX2 [2]immintrin.h__m256i _mm256_sub_epi8(__m256i,__m256i)
_mm256_sub_pdAVX [2]immintrin.h__m256d _mm256_sub_pd(__m256d,__m256d)
_mm256_sub_psAVX [2]immintrin.h__m256 _mm256_sub_ps(__m256,__m256)
_mm256_subs_epi16AVX2 [2]immintrin.h__m256i _mm256_subs_epi16(__m256i,__m256i)
_mm256_subs_epi8AVX2 [2]immintrin.h__m256i _mm256_subs_epi8(__m256i,__m256i)
_mm256_subs_epu16AVX2 [2]immintrin.h__m256i _mm256_subs_epu16(__m256i,__m256i)
_mm256_subs_epu8AVX2 [2]immintrin.h__m256i _mm256_subs_epu8(__m256i,__m256i)
_mm256_testc_pdAVX [2]immintrin.hint _mm256_testc_pd(__m256d,__m256d)
_mm256_testc_psAVX [2]immintrin.hint _mm256_testc_ps(__m256,__m256)
_mm256_testc_si256AVX [2]immintrin.hint _mm256_testc_si256(__m256i,__m256i)
_mm256_testnzc_pdAVX [2]immintrin.hint _mm256_testnzc_pd(__m256d,__m256d)
_mm256_testnzc_psAVX [2]immintrin.hint _mm256_testnzc_ps(__m256,__m256)
_mm256_testnzc_si256AVX [2]immintrin.hint _mm256_testnzc_si256(__m256i,__m256i)
_mm256_testz_pdAVX [2]immintrin.hint _mm256_testz_pd(__m256d,__m256d)
_mm256_testz_psAVX [2]immintrin.hint _mm256_testz_ps(__m256,__m256)
_mm256_testz_si256AVX [2]immintrin.hint _mm256_testz_si256(__m256i,__m256i)
_mm256_unpackhi_epi16AVX2 [2]immintrin.h__m256i _mm256_unpackhi_epi16(__m256i,__m256i)
_mm256_unpackhi_epi32AVX2 [2]immintrin.h__m256i _mm256_unpackhi_epi32(__m256i,__m256i)
_mm256_unpackhi_epi64AVX2 [2]immintrin.h__m256i _mm256_unpackhi_epi64(__m256i,__m256i)
_mm256_unpackhi_epi8AVX2 [2]immintrin.h__m256i _mm256_unpackhi_epi8(__m256i,__m256i)
_mm256_unpackhi_pdAVX [2]immintrin.h__m256d _mm256_unpackhi_pd(__m256d,__m256d)
_mm256_unpackhi_psAVX [2]immintrin.h__m256 _mm256_unpackhi_ps(__m256,__m256)
_mm256_unpacklo_epi16AVX2 [2]immintrin.h__m256i _mm256_unpacklo_epi16(__m256i,__m256i)
_mm256_unpacklo_epi32AVX2 [2]immintrin.h__m256i _mm256_unpacklo_epi32(__m256i,__m256i)
_mm256_unpacklo_epi64AVX2 [2]immintrin.h__m256i _mm256_unpacklo_epi64(__m256i,__m256i)
_mm256_unpacklo_epi8AVX2 [2]immintrin.h__m256i _mm256_unpacklo_epi8(__m256i,__m256i)
_mm256_unpacklo_pdAVX [2]immintrin.h__m256d _mm256_unpacklo_pd(__m256d,__m256d)
_mm256_unpacklo_psAVX [2]immintrin.h__m256 _mm256_unpacklo_ps(__m256,__m256)
_mm256_xor_pdAVX [2]immintrin.h__m256d _mm256_xor_pd(__m256d,__m256d)
_mm256_xor_psAVX [2]immintrin.h__m256 _mm256_xor_ps(__m256,__m256)
_mm256_xor_si256AVX2 [2]immintrin.h__m256i _mm256_xor_si256(__m256i,__m256i)
_mm256_zeroallAVX [2]immintrin.hvoid _mm256_zeroall(void)
_mm256_zeroupperAVX [2]immintrin.hvoid _mm256_zeroupper(void)
__movsbintrin.hVOID __movsb(IN PBYTE,IN BYTE const *,IN SIZE_T)
__movsdintrin.hVOID __movsd(IN PDWORD,IN DWORD const *,IN SIZE_T)
__movsqintrin.hVOID __movsq(IN PDWORD64,IN DWORD64 const *,IN SIZE_T)
__movswintrin.hVOID __movsw(IN PWORD,IN WORD const *,IN SIZE_T)
_mul128intrin.h__int64 _mul128(__int64 multiplier,__int64 multiplicand,__int64 *highproduct)
__mulhintrin.h__int64 __mulh(__int64,__int64)
_mulx_u32BMI [2]immintrin.hunsigned int _mulx_u32(unsigned int,unsigned int,unsigned int*)
_mulx_u64BMI [2]immintrin.hunsigned __int64 _mulx_u64(unsigned __int64,unsigned __int64,unsigned __int64*)
__nopintrin.hvoid __nop(void)
__nvreg_restore_fenceintrin.hvoid __nvreg_restore_fence(void)
__nvreg_save_fenceintrin.hvoid __nvreg_save_fence(void)
__outbyteintrin.hvoid __outbyte(unsigned short Port,unsigned char Data)
__outbytestringintrin.hvoid __outbytestring(unsigned short Port,unsigned char *Buffer,unsigned long Count)
__outdwordintrin.hvoid __outdword(unsigned short Port,unsigned long Data)
__outdwordstringintrin.hvoid __outdwordstring(unsigned short Port,unsigned long *Buffer,unsigned long Count)
__outwordintrin.hvoid __outword(unsigned short Port,unsigned short Data)
__outwordstringintrin.hvoid __outwordstring(unsigned short Port,unsigned short *Buffer,unsigned long Count)
_pdep_u32BMI [2]immintrin.hunsigned int _pdep_u32(unsigned int,unsigned int)
_pdep_u64BMI [2]immintrin.hunsigned __int64 _pdep_u64(unsigned __int64,unsigned __int64)
_pext_u32BMI [2]immintrin.hunsigned int _pext_u32(unsigned int,unsigned int)
_pext_u64BMI [2]immintrin.hunsigned __int64 _pext_u64(unsigned __int64,unsigned __int64)
__popcntPOPCNTintrin.hunsigned int __popcnt(unsigned int)
__popcnt16POPCNTintrin.hunsigned short __popcnt16(unsigned short)
__popcnt64POPCNTintrin.hunsigned __int64 __popcnt64(unsigned __int64)
_rdrand16_stepRDRAND [2]immintrin.hint _rdrand16_step(unsigned short *)
_rdrand32_stepRDRAND [2]immintrin.hint _rdrand32_step(unsigned int *)
_rdrand64_stepRDRAND [2]immintrin.hint _rdrand64_step(unsigned __int64 *)
_rdseed16_stepRDSEED [2]immintrin.hint _rdseed16_step(unsigned short *)
_rdseed32_stepRDSEED [2]immintrin.hint _rdseed32_step(unsigned int *)
_rdseed64_stepRDSEED [2]immintrin.hint _rdseed64_step(unsigned __int64 *)
__rdtscintrin.hunsigned __int64 __rdtsc(void)
__rdtscpRDTSCPintrin.hunsigned __int64 __rdtscp(unsigned int*)
_ReadBarrierintrin.hvoid _ReadBarrier(void)
__readcr0intrin.hunsigned __int64 __readcr0(void)
__readcr2intrin.hunsigned __int64 __readcr2(void)
__readcr3intrin.hunsigned __int64 __readcr3(void)
__readcr4intrin.hunsigned __int64 __readcr4(void)
__readcr8intrin.hunsigned __int64 __readcr8(void)
__readdrintrin.hunsigned __int64 __readdr(unsigned)
__readeflagsintrin.hunsigned __int64 __readeflags(void)
_readfsbase_u32FSGSBASE [2]immintrin.hunsigned int _readfsbase_u32(void)
_readfsbase_u64FSGSBASE [2]immintrin.hunsigned __int64 _readfsbase_u64(void)
_readgsbase_u32FSGSBASE [2]immintrin.hunsigned int _readgsbase_u32(void)
_readgsbase_u64FSGSBASE [2]immintrin.hunsigned __int64 _readgsbase_u64(void)
__readgsbyteintrin.hunsigned char __readgsbyte(unsigned long Offset)
__readgsdwordintrin.hunsigned long __readgsdword(unsigned long Offset)
__readgsqwordintrin.hunsigned __int64 __readgsqword(unsigned long Offset)
__readgswordintrin.hunsigned short __readgsword(unsigned long Offset)
__readmsrintrin.hunsigned __int64 __readmsr(unsigned long)
__readpmcintrin.hunsigned __int64 __readpmc(unsigned long a)
_ReadWriteBarrierintrin.hvoid _ReadWriteBarrier(void)
_ReturnAddressintrin.hvoid * _ReturnAddress(void)
_rorx_u32BMI [2]immintrin.hunsigned int _rorx_u32(unsigned int,const unsigned int)
_rorx_u64BMI [2]immintrin.hunsigned __int64 _rorx_u64(unsigned __int64,const unsigned int)
_rotl16intrin.hunsigned short _rotl16(unsigned short value,unsigned char shift)
_rotl8intrin.hunsigned char _rotl8(unsigned char value,unsigned char shift)
_rotr16intrin.hunsigned short _rotr16(unsigned short value,unsigned char shift)
_rotr8intrin.hunsigned char _rotr8(unsigned char value,unsigned char shift)
_rsmintrin.hvoid _rsm(void)
_sarx_i32BMI [2]immintrin.hint _sarx_i32(int,unsigned int)
_sarx_i64BMI [2]immintrin.h__int64 _sarx_i64(__int64,unsigned int)
__segmentlimitintrin.hunsigned long __segmentlimit(unsigned long a)
_sgdtintrin.hvoid _sgdt(void*)
__shiftleft128intrin.hunsigned __int64 __shiftleft128(unsigned __int64 LowPart,unsigned __int64 HighPart,unsigned char Shift)
__shiftright128intrin.hunsigned __int64 __shiftright128(unsigned __int64 LowPart,unsigned __int64 HighPart,unsigned char Shift)
_shlx_u32BMI [2]immintrin.hunsigned int _shlx_u32(unsigned int,unsigned int)
_shlx_u64BMI [2]immintrin.hunsigned __int64 _shlx_u64(unsigned __int64,unsigned int)
_shrx_u32BMI [2]immintrin.hunsigned int _shrx_u32(unsigned int,unsigned int)
_shrx_u64BMI [2]immintrin.hunsigned __int64 _shrx_u64(unsigned __int64,unsigned int)
__sidtintrin.hvoid __sidt(void*)
__slwpcbLWP [1]ammintrin.hvoid *__slwpcb(void)
_stacSMAPintrin.hvoid _stac(void)
_store_be_u16

 _storebe_i16
MOVBEimmintrin.hvoid _store_be_u16(void *, unsigned short);

void _storebe_i16(void *, short); [3]
_store_be_u32

 _storebe_i32
MOVBEimmintrin.hvoid _store_be_u32(void *, unsigned int);

void _storebe_i32(void *, int); [3]
_store_be_u64

 _storebe_i64
MOVBEimmintrin.hvoid _store_be_u64(void *, unsigned __int64);

void _storebe_i64(void *, __int64); [3]
_Store_HLEReleaseHLE [2]immintrin.hvoid _Store_HLERelease(long volatile *,long)
_Store64_HLEReleaseHLE [2]immintrin.hvoid _Store64_HLERelease(__int64 volatile *,__int64)
_StorePointer_HLEReleaseHLE [2]immintrin.hvoid _StorePointer_HLERelease(void * volatile *,void *)
__stosbintrin.hvoid __stosb(IN PBYTE,IN BYTE,IN SIZE_T)
__stosdintrin.hvoid __stosd(IN PDWORD,IN DWORD,IN SIZE_T)
__stosqintrin.hvoid __stosq(IN PDWORD64,IN DWORD64,IN SIZE_T)
__stoswintrin.hvoid __stosw(IN PWORD,IN WORD,IN SIZE_T)
_subborrow_u16intrin.hunsigned char _subborrow_u16(unsigned char b_in,unsigned short src1,unsigned short src2,unsigned short *diff)
_subborrow_u32intrin.hunsigned char _subborrow_u32(unsigned char b_in,unsigned int src1,unsigned int src2,unsigned int *diff)
_subborrow_u64intrin.hunsigned char _subborrow_u64(unsigned char b_in,unsigned __int64 src1,unsigned __int64 src2,unsigned __int64 *diff)
_subborrow_u8intrin.hunsigned char _subborrow_u8(unsigned char b_in,unsigned char src1,unsigned char src2,unsigned char *diff)
__svm_clgiintrin.hvoid __svm_clgi(void)
__svm_invlpgaintrin.hvoid __svm_invlpga(void*,int)
__svm_skinitintrin.hvoid __svm_skinit(int)
__svm_stgiintrin.hvoid __svm_stgi(void)
__svm_vmloadintrin.hvoid __svm_vmload(size_t)
__svm_vmrunintrin.hvoid __svm_vmrun(size_t)
__svm_vmsaveintrin.hvoid __svm_vmsave(size_t)
_t1mskc_u32ABM [1]ammintrin.hunsigned int _t1mskc_u32(unsigned int)
_t1mskc_u64ABM [1]ammintrin.hunsigned __int64 _t1mskc_u64(unsigned __int64)
_tzcnt_u32BMIammintrin.h, immintrin.hunsigned int _tzcnt_u32(unsigned int)
_tzcnt_u64BMIammintrin.h, immintrin.hunsigned __int64 _tzcnt_u64(unsigned __int64)
_tzmsk_u32ABM [1]ammintrin.hunsigned int _tzmsk_u32(unsigned int)
_tzmsk_u64ABM [1]ammintrin.hunsigned __int64 _tzmsk_u64(unsigned __int64)
__ud2intrin.hvoid __ud2(void)
__ull_rshiftintrin.hunsigned __int64 [pascal/cdecl] __ull_rshift(unsigned __int64,int)
_umul128intrin.hunsigned __int64 _umul128(unsigned __int64 multiplier,unsigned __int64 multiplicand,unsigned __int64 *highproduct)
__umulhintrin.hunsigned __int64 __umulh(unsigned __int64,unsigned __int64)
__vmx_offintrin.hvoid __vmx_off(void)
__vmx_onintrin.hunsigned char __vmx_on(unsigned __int64*)
__vmx_vmclearintrin.hunsigned char __vmx_vmclear(unsigned __int64*)
__vmx_vmlaunchintrin.hunsigned char __vmx_vmlaunch(void)
__vmx_vmptrldintrin.hunsigned char __vmx_vmptrld(unsigned __int64*)
__vmx_vmptrstintrin.hvoid __vmx_vmptrst(unsigned __int64 *)
__vmx_vmreadintrin.hunsigned char __vmx_vmread(size_t,size_t*)
__vmx_vmresumeintrin.hunsigned char __vmx_vmresume(void)
__vmx_vmwriteintrin.hunsigned char __vmx_vmwrite(size_t,size_t)
__wbinvdintrin.hvoid __wbinvd(void)
_WriteBarrierintrin.hvoid _WriteBarrier(void)
__writecr0intrin.hvoid __writecr0(unsigned __int64)
__writecr3intrin.hvoid __writecr3(unsigned __int64)
__writecr4intrin.hvoid __writecr4(unsigned __int64)
__writecr8intrin.hvoid __writecr8(unsigned __int64)
__writedrintrin.hvoid __writedr(unsigned,unsigned __int64)
__writeeflagsintrin.hvoid __writeeflags(unsigned __int64)
_writefsbase_u32FSGSBASE [2]immintrin.hvoid _writefsbase_u32(unsigned int)
_writefsbase_u64FSGSBASE [2]immintrin.hvoid _writefsbase_u64(unsigned __int64)
_writegsbase_u32FSGSBASE [2]immintrin.hvoid _writegsbase_u32(unsigned int)
_writegsbase_u64FSGSBASE [2]immintrin.hvoid _writegsbase_u64(unsigned __int64)
__writegsbyteintrin.hvoid __writegsbyte(unsigned long Offset,unsigned char Data)
__writegsdwordintrin.hvoid __writegsdword(unsigned long Offset,unsigned long Data)
__writegsqwordintrin.hvoid __writegsqword(unsigned long Offset,unsigned __int64 Data)
__writegswordintrin.hvoid __writegsword(unsigned long Offset,unsigned short Data)
__writemsrintrin.hvoid __writemsr(unsigned long,unsigned __int64)
_xabortRTM [2]immintrin.hvoid _xabort(unsigned int)
_xbeginRTM [2]immintrin.hunsigned _xbegin(void)
_xendRTM [2]immintrin.hvoid _xend(void)
_xgetbvXSAVE [2]immintrin.hunsigned __int64 _xgetbv(unsigned int)
_xrstorXSAVE [2]immintrin.hvoid _xrstor(void const*,unsigned __int64)
_xrstor64XSAVE [2]immintrin.hvoid _xrstor64(void const*,unsigned __int64)
_xsaveXSAVE [2]immintrin.hvoid _xsave(void*,unsigned __int64)
_xsave64XSAVE [2]immintrin.hvoid _xsave64(void*,unsigned __int64)
_xsaveoptXSAVEOPT [2]immintrin.hvoid _xsaveopt(void*,unsigned __int64)
_xsaveopt64XSAVEOPT [2]immintrin.hvoid _xsaveopt64(void*,unsigned __int64)
_xsetbvXSAVE [2]immintrin.hvoid _xsetbv(unsigned int,unsigned __int64)
_xtestXTEST [2]immintrin.hunsigned char _xtest(void)

Intrínsecos del controlador
Intrínsecos ARM
Intrínsecos x86

Mostrar: