  /* body of ghash_xor3 (a, b, c): 3-way XOR, one instruction on AVX512 */
#if defined (__AVX512F__)
  return _mm_ternarylogic_epi32 (a, b, c, 0x96);
#endif
  return a ^ b ^ c;
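The immediate 0x96 is simply the 3-input truth table of a ^ b ^ c, which is what lets _mm_ternarylogic_epi32 replace two XOR instructions with one. A minimal scalar illustration of that encoding (not part of the header; names are hypothetical):

#include <assert.h>

/* VPTERNLOG forms, per bit position, a 3-bit index from the three source
   bits and looks it up in the immediate byte.  0x96 = 0b10010110 is the
   truth table of 3-way XOR. */
static inline unsigned
xor3_bit_via_table (unsigned a, unsigned b, unsigned c)
{
  return (0x96 >> ((a << 2) | (b << 1) | c)) & 1;  /* a, b, c are 0 or 1 */
}

static inline void
xor3_selfcheck (void)
{
  /* table lookup matches a ^ b ^ c for all 8 input combinations */
  for (unsigned i = 0; i < 8; i++)
    {
      unsigned a = (i >> 2) & 1, b = (i >> 1) & 1, c = i & 1;
      assert (xor3_bit_via_table (a, b, c) == (a ^ b ^ c));
    }
}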
static const __m128i ghash_poly2 = { 0x1C2000000, 0xC200000000000000 };
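ghash_poly2 packs the reduction constant for GHASH's field, GF(2^128) with polynomial x^128 + x^7 + x^2 + x + 1, in the bit-reflected form (the 0xC200000000000000 word) that the carry-less-multiply reduction operates on. The symbol list at the end of the file also declares a companion constant ghash_poly, used by ghash_precompute; its value is not visible in the extracted fragments, but based on the upstream VPP source it is presumably:

/* assumption: value taken from the upstream VPP ghash.h, not shown in the
   fragments above */
static const __m128i ghash_poly = { 1, 0xC200000000000000 };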
  /* ghash_mul_first: start a new multiply-accumulate chain */
  /* a1 * b1 */
  gd->hi = _mm_clmulepi64_si128 (a, b, 0x11);
  /* a0 * b0 */
  gd->lo = _mm_clmulepi64_si128 (a, b, 0x00);
  /* a0 * b1 ^ a1 * b0 */
  gd->mid = (_mm_clmulepi64_si128 (a, b, 0x01) ^
             _mm_clmulepi64_si128 (a, b, 0x10));
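The gd-> fields above belong to a small accumulator struct that is threaded through ghash_mul_first, ghash_mul_next, the reduction steps and ghash_final. A sketch reconstructed from the fields the fragments touch (tmp_lo and the pending flag are not visible in the fragments and are assumptions based on the upstream VPP source):

typedef struct
{
  __m128i hi, lo, mid;    /* partial 256-bit product of the accumulated a * b terms */
  __m128i tmp_hi, tmp_lo; /* deferred XOR operands / second-step reduction halves */
  int pending;            /* assumption: nonzero while tmp_hi/tmp_lo hold unfolded data */
} ghash_data_t;

ghash_mul_first presumably also clears gd->pending, so the following ghash_mul_next calls know that nothing is deferred yet.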
  /* ghash_mul_next: a1 * b1 */
  __m128i hi = _mm_clmulepi64_si128 (a, b, 0x11);
  /* a0 * b0 */
  __m128i lo = _mm_clmulepi64_si128 (a, b, 0x00);

  /* accumulate a0 * b1 ^ a1 * b0 into the running middle term */
  gd->mid = ghash_xor3 (gd->mid,
                        _mm_clmulepi64_si128 (a, b, 0x01),
                        _mm_clmulepi64_si128 (a, b, 0x10));
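The fragment above only shows the middle-term accumulation; the freshly computed hi and lo still have to be folded into gd->hi and gd->lo. In the upstream source this is done either directly or, to save XORs when 3-way XOR is cheap, deferred by one call through gd->tmp_hi/gd->tmp_lo. A sketch of that folding, continuing the fragment (an assumption reconstructed from the upstream VPP source, not shown in the extracted lines):

  if (gd->pending)
    {
      /* fold the deferred pair and this round's pair with one 3-way XOR each */
      gd->hi = ghash_xor3 (gd->hi, gd->tmp_hi, hi);
      gd->lo = ghash_xor3 (gd->lo, gd->tmp_lo, lo);
      gd->pending = 0;
    }
  else
    {
      /* nothing deferred yet: park this pair for the next call */
      gd->tmp_hi = hi;
      gd->tmp_lo = lo;
      gd->pending = 1;
    }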
  /* ghash_reduce: fold the middle term into the 256-bit product,
     i.e. gd->lo ^= gd->mid << 64 and gd->hi ^= gd->mid >> 64 */
  __m128i midl = _mm_slli_si128 (gd->mid, 8);
  __m128i midr = _mm_srli_si128 (gd->mid, 8);
  /* ... (not shown: midl/midr are XORed into gd->lo / gd->hi, and r is the
     carry-less product of gd->lo with the ghash_poly2 constant) ... */
  gd->lo ^= _mm_slli_si128 (r, 8);
  /* ghash_final: combine hi with both halves of the second reduction step */
  return ghash_xor3 (gd->hi, _mm_srli_si128 (gd->tmp_lo, 4),
                     _mm_slli_si128 (gd->tmp_hi, 4));
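Not visible in the fragments is the second reduction step, ghash_reduce2, which multiplies gd->lo by ghash_poly2 once more and parks the two halves in gd->tmp_lo/gd->tmp_hi for ghash_final to combine. The single-block multiply declared in the symbol list then simply chains the four steps. Both are sketches based on the upstream VPP source, not taken from the extracted fragments:

static_always_inline void
ghash_reduce2 (ghash_data_t *gd)
{
  gd->tmp_lo = _mm_clmulepi64_si128 (ghash_poly2, gd->lo, 0x00);
  gd->tmp_hi = _mm_clmulepi64_si128 (ghash_poly2, gd->lo, 0x10);
}

/* one GF(2^128) multiply = full 128x128 carry-less product + 2-step reduction */
static_always_inline __m128i
ghash_mul (__m128i a, __m128i b)
{
  ghash_data_t _gd, *gd = &_gd;
  ghash_mul_first (gd, a, b);
  ghash_reduce (gd);
  ghash_reduce2 (gd);
  return ghash_final (gd);
}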
  /* ghash_precompute: calculate H << 1 mod poly from the hash key */
  r = _mm_srli_epi64 (H, 63);
  H = _mm_slli_epi64 (H, 1);
  H |= _mm_slli_si128 (r, 8);
  r = _mm_srli_si128 (r, 8);
  r = _mm_shuffle_epi32 (r, 0x24);
  r = _mm_cmpeq_epi32 (r, (__m128i) (u32x4) { 1, 0, 0, 1 });
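The mask r built above ends up selecting ghash_poly exactly when the bit shifted out of the top of H was set, so the remainder of ghash_precompute applies that conditional XOR and then builds successive powers of H with ghash_mul. A sketch based on the upstream VPP source; the exact ordering of the Hi[] powers is an assumption:

  /* conditional reduction: XOR in ghash_poly only where the carried-out bit was set */
  H ^= r & ghash_poly;

  /* precompute H^1 .. H^count for the multi-block hashing loop */
  Hi[0] = H;
  for (int i = 1; i < count; i++)
    Hi[i] = ghash_mul (H, Hi[i - 1]);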
/* Symbols declared in this header: */
#define static_always_inline
static const __m128i ghash_poly
static const __m128i ghash_poly2
static_always_inline __m128i ghash_xor3(__m128i a, __m128i b, __m128i c)
static_always_inline void ghash_mul_first(ghash_data_t *gd, __m128i a, __m128i b)
static_always_inline void ghash_mul_next(ghash_data_t *gd, __m128i a, __m128i b)
static_always_inline void ghash_reduce(ghash_data_t *gd)
static_always_inline void ghash_reduce2(ghash_data_t *gd)
static_always_inline __m128i ghash_final(ghash_data_t *gd)
static_always_inline __m128i ghash_mul(__m128i a, __m128i b)
static_always_inline void ghash_precompute(__m128i H, __m128i *Hi, int count)
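Taken together, the API is meant to be used with several blocks per reduction: one ghash_mul_first, one ghash_mul_next per additional block, then reduce/reduce2/final. A usage sketch (hypothetical helper, not part of the header; it assumes Hi[] was filled by ghash_precompute with Hi[3] holding the highest power, and that the caller has already byte-reflected the data as GHASH requires):

/* fold four 16-byte blocks d[0..3] into the running tag T */
static_always_inline __m128i
ghash4_example (__m128i T, const __m128i *d, const __m128i *Hi)
{
  ghash_data_t _gd, *gd = &_gd;
  ghash_mul_first (gd, T ^ d[0], Hi[3]);  /* (T ^ d0) * H^4 */
  ghash_mul_next (gd, d[1], Hi[2]);       /* d1 * H^3 */
  ghash_mul_next (gd, d[2], Hi[1]);       /* d2 * H^2 */
  ghash_mul_next (gd, d[3], Hi[0]);       /* d3 * H^1 */
  ghash_reduce (gd);
  ghash_reduce2 (gd);
  return ghash_final (gd);                /* new running tag */
}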