aes_cbc.c (FD.io VPP v19.04.4-rc0-5-ge88582fac)
/*
 *------------------------------------------------------------------
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */

#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vnet/crypto/crypto.h>
#include <x86intrin.h>
#include <crypto_ia32/crypto_ia32.h>
#include <crypto_ia32/aesni.h>

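/* Decrypt COUNT bytes of AES-CBC ciphertext.  Unlike encryption, CBC
 * decryption has no serial dependency between blocks (each plaintext
 * block needs only the previous ciphertext block), so several blocks
 * can be decrypted in parallel. */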
static_always_inline void
aes_cbc_dec (__m128i * k, u8 * src, u8 * dst, u8 * iv, int count,
	     aesni_key_size_t rounds)
{
  __m128i r0, r1, r2, r3, c0, c1, c2, c3, f;
  int i;

  f = _mm_loadu_si128 ((__m128i *) iv);

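  /* Main loop: decrypt four 16-byte blocks per iteration.  The four
   * AESDEC chains are independent, which lets the CPU overlap their
   * round latencies instead of stalling on one block at a time. */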
  while (count >= 64)
    {
      _mm_prefetch (src + 128, _MM_HINT_T0);
      _mm_prefetch (dst + 128, _MM_HINT_T0);

      c0 = _mm_loadu_si128 (((__m128i *) src + 0));
      c1 = _mm_loadu_si128 (((__m128i *) src + 1));
      c2 = _mm_loadu_si128 (((__m128i *) src + 2));
      c3 = _mm_loadu_si128 (((__m128i *) src + 3));

      r0 = c0 ^ k[0];
      r1 = c1 ^ k[0];
      r2 = c2 ^ k[0];
      r3 = c3 ^ k[0];

      for (i = 1; i < rounds; i++)
        {
          r0 = _mm_aesdec_si128 (r0, k[i]);
          r1 = _mm_aesdec_si128 (r1, k[i]);
          r2 = _mm_aesdec_si128 (r2, k[i]);
          r3 = _mm_aesdec_si128 (r3, k[i]);
        }

      r0 = _mm_aesdeclast_si128 (r0, k[i]);
      r1 = _mm_aesdeclast_si128 (r1, k[i]);
      r2 = _mm_aesdeclast_si128 (r2, k[i]);
      r3 = _mm_aesdeclast_si128 (r3, k[i]);

      _mm_storeu_si128 ((__m128i *) dst + 0, r0 ^ f);
      _mm_storeu_si128 ((__m128i *) dst + 1, r1 ^ c0);
      _mm_storeu_si128 ((__m128i *) dst + 2, r2 ^ c1);
      _mm_storeu_si128 ((__m128i *) dst + 3, r3 ^ c2);

      f = c3;

      count -= 64;
      src += 64;
      dst += 64;
    }

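  /* Tail: decrypt any remaining blocks one at a time. */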
  while (count > 0)
    {
      c0 = _mm_loadu_si128 (((__m128i *) src));
      r0 = c0 ^ k[0];
      for (i = 1; i < rounds; i++)
        r0 = _mm_aesdec_si128 (r0, k[i]);
      r0 = _mm_aesdeclast_si128 (r0, k[i]);
      _mm_storeu_si128 ((__m128i *) dst, r0 ^ f);
      f = c0;
      count -= 16;
      src += 16;
      dst += 16;
    }
}

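/* CBC encryption is inherently serial within a single stream, so this
 * routine interleaves up to four independent operations and encrypts
 * them in lockstep.  Lanes with no real work are pointed at a dummy
 * buffer so the 4-wide loop never has to special-case an empty slot. */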
static_always_inline u32
aesni_ops_enc_aes_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
		       u32 n_ops, aesni_key_size_t ks)
{
  crypto_ia32_main_t *cm = &crypto_ia32_main;
  crypto_ia32_per_thread_data_t *ptd = vec_elt_at_index (cm->per_thread_data,
							 vm->thread_index);
  int rounds = AESNI_KEY_ROUNDS (ks);
  u8 dummy[8192];
  u8 *src[4] = { };
  u8 *dst[4] = { };
  u8 *key[4] = { };
  u32x4 dummy_mask, len = { };
  u32 i, j, count, n_left = n_ops;
  __m128i r[4] = { }, k[4][rounds + 1];

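  /* Lane refill: any lane whose operation has finished (len == 0) is
   * loaded with the next pending op, or with the dummy buffer once the
   * queue is empty.  dummy_mask records which lanes carry real work. */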
more:
  for (i = 0; i < 4; i++)
    if (len[i] == 0)
      {
        if (n_left == 0)
          {
            /* no more work to enqueue, so we are enqueueing dummy buffer */
            src[i] = dst[i] = dummy;
            len[i] = sizeof (dummy);
            dummy_mask[i] = 0;
          }
        else
          {
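            /* If the op asks for a fresh IV, hand out the per-thread
             * seed and step the seed with one AES round so the next
             * IV differs from this one. */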
            if (ops[0]->flags & VNET_CRYPTO_OP_FLAG_INIT_IV)
              {
                r[i] = ptd->cbc_iv[i];
                _mm_storeu_si128 ((__m128i *) ops[0]->iv, r[i]);
                ptd->cbc_iv[i] = _mm_aesenc_si128 (r[i], r[i]);
              }
            else
              r[i] = _mm_loadu_si128 ((__m128i *) ops[0]->iv);
            src[i] = ops[0]->src;
            dst[i] = ops[0]->dst;
            len[i] = ops[0]->len;
            dummy_mask[i] = ~0;
            if (key[i] != ops[0]->key)
              {
                aes_key_expand (k[i], ops[0]->key, ks);
                key[i] = ops[0]->key;
              }
            ops[0]->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
            n_left--;
            ops++;
          }
      }

  count = u32x4_min_scalar (len);

  ASSERT (count % 16 == 0);

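  /* Encrypt all four lanes in lockstep for as many whole blocks as the
   * shortest active lane allows; r[] carries each lane's CBC chain. */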
  for (i = 0; i < count; i += 16)
    {
      r[0] ^= _mm_loadu_si128 ((__m128i *) (src[0] + i)) ^ k[0][0];
      r[1] ^= _mm_loadu_si128 ((__m128i *) (src[1] + i)) ^ k[1][0];
      r[2] ^= _mm_loadu_si128 ((__m128i *) (src[2] + i)) ^ k[2][0];
      r[3] ^= _mm_loadu_si128 ((__m128i *) (src[3] + i)) ^ k[3][0];

      for (j = 1; j < rounds; j++)
        {
          r[0] = _mm_aesenc_si128 (r[0], k[0][j]);
          r[1] = _mm_aesenc_si128 (r[1], k[1][j]);
          r[2] = _mm_aesenc_si128 (r[2], k[2][j]);
          r[3] = _mm_aesenc_si128 (r[3], k[3][j]);
        }

      r[0] = _mm_aesenclast_si128 (r[0], k[0][j]);
      r[1] = _mm_aesenclast_si128 (r[1], k[1][j]);
      r[2] = _mm_aesenclast_si128 (r[2], k[2][j]);
      r[3] = _mm_aesenclast_si128 (r[3], k[3][j]);

      _mm_storeu_si128 ((__m128i *) (dst[0] + i), r[0]);
      _mm_storeu_si128 ((__m128i *) (dst[1] + i), r[1]);
      _mm_storeu_si128 ((__m128i *) (dst[2] + i), r[2]);
      _mm_storeu_si128 ((__m128i *) (dst[3] + i), r[3]);
    }

  for (i = 0; i < 4; i++)
    {
      src[i] += count;
      dst[i] += count;
      len[i] -= count;
    }

  if (n_left > 0)
    goto more;

  if (!u32x4_is_all_zero (len & dummy_mask))
    goto more;

  return n_ops;
}

static_always_inline u32
aesni_ops_dec_aes_cbc (vlib_main_t * vm, vnet_crypto_op_t * ops[],
		       u32 n_ops, aesni_key_size_t ks)
{
  int rounds = AESNI_KEY_ROUNDS (ks);
  vnet_crypto_op_t *op = ops[0];
  u32 n_left = n_ops;
  u8 *last_key;
  __m128i k[rounds + 1];

  ASSERT (n_ops >= 1);

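  /* Expand the key schedule once and reuse it for consecutive ops that
   * share a key; re-expand only when the key pointer changes. */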
key_expand:
  last_key = op->key;
  aes_key_expand (k, op->key, ks);
  aes_key_enc_to_dec (k, ks);

decrypt:
  aes_cbc_dec (k, op->src, op->dst, op->iv, op->len, rounds);
  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;

  if (--n_left)
    {
      op += 1;
      if (last_key != op->key)
        goto key_expand;
      goto decrypt;
    }

  return n_ops;
}

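/* Stamp out per-key-size encrypt/decrypt handlers for AES-128/192/256. */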
#define foreach_aesni_cbc_handler_type _(128) _(192) _(256)

#define _(x) \
static u32 aesni_ops_dec_aes_cbc_##x \
(vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return aesni_ops_dec_aes_cbc (vm, ops, n_ops, AESNI_KEY_##x); } \
static u32 aesni_ops_enc_aes_cbc_##x \
(vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return aesni_ops_enc_aes_cbc (vm, ops, n_ops, AESNI_KEY_##x); } \

foreach_aesni_cbc_handler_type;
#undef _

#include <fcntl.h>

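/* Plugin init for the CBC handlers: seed each worker thread's IV state
 * from /dev/urandom, then register the encrypt/decrypt handlers with
 * the crypto engine. */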
clib_error_t *
crypto_ia32_aesni_cbc_init (vlib_main_t * vm)
{
  crypto_ia32_main_t *cm = &crypto_ia32_main;
  crypto_ia32_per_thread_data_t *ptd;
  clib_error_t *err = 0;
  int fd;

  if ((fd = open ("/dev/urandom", O_RDONLY)) < 0)
    return clib_error_return_unix (0, "failed to open '/dev/urandom'");

  /* *INDENT-OFF* */
  vec_foreach (ptd, cm->per_thread_data)
    {
      for (int i = 0; i < 4; i++)
        {
          if (read (fd, ptd->cbc_iv, sizeof (ptd->cbc_iv)) !=
              sizeof (ptd->cbc_iv))
            {
              err = clib_error_return_unix (0, "'/dev/urandom' read failure");
              goto error;
            }
        }
    }
  /* *INDENT-ON* */

#define _(x) \
  vnet_crypto_register_ops_handler (vm, cm->crypto_engine_index, \
				    VNET_CRYPTO_OP_AES_##x##_CBC_ENC, \
				    aesni_ops_enc_aes_cbc_##x); \
  vnet_crypto_register_ops_handler (vm, cm->crypto_engine_index, \
				    VNET_CRYPTO_OP_AES_##x##_CBC_DEC, \
				    aesni_ops_dec_aes_cbc_##x);
  foreach_aesni_cbc_handler_type;
#undef _

error:
  close (fd);
  return err;
}

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */