crypto/snow3g: use IPsec library
[dpdk.git] drivers/crypto/snow3g/rte_snow3g_pmd.c
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016-2018 Intel Corporation
 */

#include <rte_common.h>
#include <rte_hexdump.h>
#include <rte_cryptodev.h>
#include <rte_cryptodev_pmd.h>
#include <rte_bus_vdev.h>
#include <rte_malloc.h>
#include <rte_cpuflags.h>

#include "snow3g_pmd_private.h"

#define SNOW3G_IV_LENGTH 16
#define SNOW3G_MAX_BURST 8
#define BYTE_LEN 8

static uint8_t cryptodev_driver_id;

/** Get xform chain order. */
static enum snow3g_operation
snow3g_get_mode(const struct rte_crypto_sym_xform *xform)
{
	if (xform == NULL)
		return SNOW3G_OP_NOT_SUPPORTED;

	if (xform->next != NULL && xform->next->next != NULL)
		return SNOW3G_OP_NOT_SUPPORTED;

	if (xform->type == RTE_CRYPTO_SYM_XFORM_AUTH) {
		if (xform->next == NULL)
			return SNOW3G_OP_ONLY_AUTH;
		else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_CIPHER)
			return SNOW3G_OP_AUTH_CIPHER;
		else
			return SNOW3G_OP_NOT_SUPPORTED;
	}

	if (xform->type == RTE_CRYPTO_SYM_XFORM_CIPHER) {
		if (xform->next == NULL)
			return SNOW3G_OP_ONLY_CIPHER;
		else if (xform->next->type == RTE_CRYPTO_SYM_XFORM_AUTH)
			return SNOW3G_OP_CIPHER_AUTH;
		else
			return SNOW3G_OP_NOT_SUPPORTED;
	}

	return SNOW3G_OP_NOT_SUPPORTED;
}

/** Parse crypto xform chain and set private session parameters. */
int
snow3g_set_session_parameters(MB_MGR *mgr, struct snow3g_session *sess,
		const struct rte_crypto_sym_xform *xform)
{
	const struct rte_crypto_sym_xform *auth_xform = NULL;
	const struct rte_crypto_sym_xform *cipher_xform = NULL;
	enum snow3g_operation mode;

	/* Select Crypto operation - hash then cipher / cipher then hash */
	mode = snow3g_get_mode(xform);

	switch (mode) {
	case SNOW3G_OP_CIPHER_AUTH:
		auth_xform = xform->next;

		/* Fall-through */
	case SNOW3G_OP_ONLY_CIPHER:
		cipher_xform = xform;
		break;
	case SNOW3G_OP_AUTH_CIPHER:
		cipher_xform = xform->next;
		/* Fall-through */
	case SNOW3G_OP_ONLY_AUTH:
		auth_xform = xform;
		break;
	case SNOW3G_OP_NOT_SUPPORTED:
	default:
		SNOW3G_LOG(ERR, "Unsupported operation chain order parameter");
		return -ENOTSUP;
	}

	if (cipher_xform) {
		/* Only SNOW 3G UEA2 supported */
		if (cipher_xform->cipher.algo != RTE_CRYPTO_CIPHER_SNOW3G_UEA2)
			return -ENOTSUP;

		if (cipher_xform->cipher.iv.length != SNOW3G_IV_LENGTH) {
			SNOW3G_LOG(ERR, "Wrong IV length");
			return -EINVAL;
		}
		if (cipher_xform->cipher.key.length > SNOW3G_MAX_KEY_SIZE) {
			SNOW3G_LOG(ERR, "Not enough memory to store the key");
			return -ENOMEM;
		}

		sess->cipher_iv_offset = cipher_xform->cipher.iv.offset;

		/* Initialize key */
		IMB_SNOW3G_INIT_KEY_SCHED(mgr, cipher_xform->cipher.key.data,
					&sess->pKeySched_cipher);
	}

	if (auth_xform) {
		/* Only SNOW 3G UIA2 supported */
		if (auth_xform->auth.algo != RTE_CRYPTO_AUTH_SNOW3G_UIA2)
			return -ENOTSUP;

		if (auth_xform->auth.digest_length != SNOW3G_DIGEST_LENGTH) {
			SNOW3G_LOG(ERR, "Wrong digest length");
			return -EINVAL;
		}
		if (auth_xform->auth.key.length > SNOW3G_MAX_KEY_SIZE) {
			SNOW3G_LOG(ERR, "Not enough memory to store the key");
			return -ENOMEM;
		}

		sess->auth_op = auth_xform->auth.op;

		if (auth_xform->auth.iv.length != SNOW3G_IV_LENGTH) {
			SNOW3G_LOG(ERR, "Wrong IV length");
			return -EINVAL;
		}
		sess->auth_iv_offset = auth_xform->auth.iv.offset;

		/* Initialize key */
		IMB_SNOW3G_INIT_KEY_SCHED(mgr, auth_xform->auth.key.data,
					&sess->pKeySched_hash);
	}

	sess->op = mode;

	return 0;
}

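/*
 * Sessionful ops look up the driver-private session data attached to
 * op->sym->session via the driver id. Sessionless ops get a temporary
 * session header and private data object from the queue pair mempools and
 * initialise them from the op's xform chain; both objects are returned to
 * their mempools once the op has been processed (see process_ops() and
 * process_op_bit()).
 */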
/** Get SNOW 3G session. */
static struct snow3g_session *
snow3g_get_session(struct snow3g_qp *qp, struct rte_crypto_op *op)
{
	struct snow3g_session *sess = NULL;

	if (op->sess_type == RTE_CRYPTO_OP_WITH_SESSION) {
		if (likely(op->sym->session != NULL))
			sess = (struct snow3g_session *)
					get_sym_session_private_data(
					op->sym->session,
					cryptodev_driver_id);
	} else {
		void *_sess = NULL;
		void *_sess_private_data = NULL;

		if (rte_mempool_get(qp->sess_mp, (void **)&_sess))
			return NULL;

		if (rte_mempool_get(qp->sess_mp_priv,
				(void **)&_sess_private_data)) {
			/* Return the session header to its mempool. */
			rte_mempool_put(qp->sess_mp, _sess);
			return NULL;
		}

		sess = (struct snow3g_session *)_sess_private_data;

		if (unlikely(snow3g_set_session_parameters(qp->mgr, sess,
				op->sym->xform) != 0)) {
			rte_mempool_put(qp->sess_mp, _sess);
			rte_mempool_put(qp->sess_mp_priv, _sess_private_data);
			sess = NULL;
		}
		op->sym->session = (struct rte_cryptodev_sym_session *)_sess;
		set_sym_session_private_data(op->sym->session,
				cryptodev_driver_id, _sess_private_data);
	}

	if (unlikely(sess == NULL))
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_SESSION;

	return sess;
}

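/*
 * cipher.data.offset/length are expressed in bits. The enqueue path routes
 * any op with a bit-level offset or length to process_snow3g_cipher_op_bit(),
 * so here the values are byte aligned and ">> 3" converts them to byte
 * offsets/lengths for the N-buffer call.
 */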
/** Encrypt/decrypt mbufs with same cipher key. */
static uint8_t
process_snow3g_cipher_op(struct snow3g_qp *qp, struct rte_crypto_op **ops,
		struct snow3g_session *session,
		uint8_t num_ops)
{
	unsigned i;
	uint8_t processed_ops = 0;
	const void *src[SNOW3G_MAX_BURST];
	void *dst[SNOW3G_MAX_BURST];
	const void *iv[SNOW3G_MAX_BURST];
	uint32_t num_bytes[SNOW3G_MAX_BURST];

	for (i = 0; i < num_ops; i++) {
		src[i] = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
				(ops[i]->sym->cipher.data.offset >> 3);
		dst[i] = ops[i]->sym->m_dst ?
			rte_pktmbuf_mtod(ops[i]->sym->m_dst, uint8_t *) +
				(ops[i]->sym->cipher.data.offset >> 3) :
			rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
				(ops[i]->sym->cipher.data.offset >> 3);
		iv[i] = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
				session->cipher_iv_offset);
		num_bytes[i] = ops[i]->sym->cipher.data.length >> 3;

		processed_ops++;
	}

	IMB_SNOW3G_F8_N_BUFFER(qp->mgr, &session->pKeySched_cipher, iv,
			src, dst, num_bytes, processed_ops);

	return processed_ops;
}

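/*
 * Bit-level variant: the offset and length are handed to the single-buffer
 * bit API unchanged. In-place operation (m_dst == NULL) is rejected.
 */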
/** Encrypt/decrypt mbuf (bit level function). */
static uint8_t
process_snow3g_cipher_op_bit(struct snow3g_qp *qp,
		struct rte_crypto_op *op,
		struct snow3g_session *session)
{
	uint8_t *src, *dst;
	uint8_t *iv;
	uint32_t length_in_bits, offset_in_bits;

	offset_in_bits = op->sym->cipher.data.offset;
	src = rte_pktmbuf_mtod(op->sym->m_src, uint8_t *);
	if (op->sym->m_dst == NULL) {
		op->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
		SNOW3G_LOG(ERR, "bit-level in-place not supported\n");
		return 0;
	}
	dst = rte_pktmbuf_mtod(op->sym->m_dst, uint8_t *);
	iv = rte_crypto_op_ctod_offset(op, uint8_t *,
				session->cipher_iv_offset);
	length_in_bits = op->sym->cipher.data.length;

	IMB_SNOW3G_F8_1_BUFFER_BIT(qp->mgr, &session->pKeySched_cipher, iv,
			src, dst, length_in_bits, offset_in_bits);

	return 1;
}

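/*
 * For RTE_CRYPTO_AUTH_OP_VERIFY the digest is computed into the queue pair's
 * scratch buffer (qp->temp_digest) and compared against the digest supplied
 * with the op; for generation it is written straight into the op's digest
 * buffer.
 */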
/** Generate/verify hash from mbufs with same hash key. */
static int
process_snow3g_hash_op(struct snow3g_qp *qp, struct rte_crypto_op **ops,
		struct snow3g_session *session,
		uint8_t num_ops)
{
	unsigned i;
	uint8_t processed_ops = 0;
	uint8_t *src, *dst;
	uint32_t length_in_bits;
	uint8_t *iv;

	for (i = 0; i < num_ops; i++) {
		/* Data must be byte aligned */
		if ((ops[i]->sym->auth.data.offset % BYTE_LEN) != 0) {
			ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
			SNOW3G_LOG(ERR, "Auth data offset is not byte-aligned");
			break;
		}

		length_in_bits = ops[i]->sym->auth.data.length;

		src = rte_pktmbuf_mtod(ops[i]->sym->m_src, uint8_t *) +
				(ops[i]->sym->auth.data.offset >> 3);
		iv = rte_crypto_op_ctod_offset(ops[i], uint8_t *,
				session->auth_iv_offset);

		if (session->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			dst = qp->temp_digest;

			IMB_SNOW3G_F9_1_BUFFER(qp->mgr,
					&session->pKeySched_hash,
					iv, src, length_in_bits, dst);
			/* Verify digest. */
			if (memcmp(dst, ops[i]->sym->auth.digest.data,
					SNOW3G_DIGEST_LENGTH) != 0)
				ops[i]->status = RTE_CRYPTO_OP_STATUS_AUTH_FAILED;
		} else {
			dst = ops[i]->sym->auth.digest.data;

			IMB_SNOW3G_F9_1_BUFFER(qp->mgr,
					&session->pKeySched_hash,
					iv, src, length_in_bits, dst);
		}
		processed_ops++;
	}

	return processed_ops;
}

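/*
 * All ops in the batch share one session; the chained modes run the two
 * primitives back to back over the same buffers. With
 * RTE_LIBRTE_PMD_SNOW3G_DEBUG enabled, non-contiguous mbufs are rejected up
 * front since the underlying buffer API expects flat buffers.
 */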
/** Process a batch of crypto ops which shares the same session. */
static int
process_ops(struct rte_crypto_op **ops, struct snow3g_session *session,
		struct snow3g_qp *qp, uint8_t num_ops,
		uint16_t *accumulated_enqueued_ops)
{
	unsigned i;
	unsigned enqueued_ops, processed_ops;

#ifdef RTE_LIBRTE_PMD_SNOW3G_DEBUG
	for (i = 0; i < num_ops; i++) {
		if (!rte_pktmbuf_is_contiguous(ops[i]->sym->m_src) ||
				(ops[i]->sym->m_dst != NULL &&
				!rte_pktmbuf_is_contiguous(
						ops[i]->sym->m_dst))) {
			SNOW3G_LOG(ERR, "PMD supports only contiguous mbufs, "
				"op (%p) provides noncontiguous mbuf as "
				"source/destination buffer.\n", ops[i]);
			ops[i]->status = RTE_CRYPTO_OP_STATUS_INVALID_ARGS;
			return 0;
		}
	}
#endif

	switch (session->op) {
	case SNOW3G_OP_ONLY_CIPHER:
		processed_ops = process_snow3g_cipher_op(qp, ops,
				session, num_ops);
		break;
	case SNOW3G_OP_ONLY_AUTH:
		processed_ops = process_snow3g_hash_op(qp, ops, session,
				num_ops);
		break;
	case SNOW3G_OP_CIPHER_AUTH:
		processed_ops = process_snow3g_cipher_op(qp, ops, session,
				num_ops);
		process_snow3g_hash_op(qp, ops, session, processed_ops);
		break;
	case SNOW3G_OP_AUTH_CIPHER:
		processed_ops = process_snow3g_hash_op(qp, ops, session,
				num_ops);
		process_snow3g_cipher_op(qp, ops, session, processed_ops);
		break;
	default:
		/* Operation not supported. */
		processed_ops = 0;
	}

	for (i = 0; i < num_ops; i++) {
		/*
		 * If there was no error/authentication failure,
		 * change status to successful.
		 */
		if (ops[i]->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
			ops[i]->status = RTE_CRYPTO_OP_STATUS_SUCCESS;
		/* Free session if a session-less crypto op. */
		if (ops[i]->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
			memset(session, 0, sizeof(struct snow3g_session));
			memset(ops[i]->sym->session, 0,
			rte_cryptodev_sym_get_existing_header_session_size(
					ops[i]->sym->session));
			rte_mempool_put(qp->sess_mp_priv, session);
			rte_mempool_put(qp->sess_mp, ops[i]->sym->session);
			ops[i]->sym->session = NULL;
		}
	}

	enqueued_ops = rte_ring_enqueue_burst(qp->processed_ops,
			(void **)ops, processed_ops, NULL);
	qp->qp_stats.enqueued_count += enqueued_ops;
	*accumulated_enqueued_ops += enqueued_ops;

	return enqueued_ops;
}

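/*
 * Bit-level ops are processed one at a time; the hash step still requires a
 * byte-aligned auth offset (checked in process_snow3g_hash_op()).
 */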
/** Process a crypto op with length/offset in bits. */
static int
process_op_bit(struct rte_crypto_op *op, struct snow3g_session *session,
		struct snow3g_qp *qp, uint16_t *accumulated_enqueued_ops)
{
	unsigned enqueued_op, processed_op;

	switch (session->op) {
	case SNOW3G_OP_ONLY_CIPHER:
		processed_op = process_snow3g_cipher_op_bit(qp, op,
				session);
		break;
	case SNOW3G_OP_ONLY_AUTH:
		processed_op = process_snow3g_hash_op(qp, &op, session, 1);
		break;
	case SNOW3G_OP_CIPHER_AUTH:
		processed_op = process_snow3g_cipher_op_bit(qp, op, session);
		if (processed_op == 1)
			process_snow3g_hash_op(qp, &op, session, 1);
		break;
	case SNOW3G_OP_AUTH_CIPHER:
		processed_op = process_snow3g_hash_op(qp, &op, session, 1);
		if (processed_op == 1)
			process_snow3g_cipher_op_bit(qp, op, session);
		break;
	default:
		/* Operation not supported. */
		processed_op = 0;
	}

	/*
	 * If there was no error/authentication failure,
	 * change status to successful.
	 */
	if (op->status == RTE_CRYPTO_OP_STATUS_NOT_PROCESSED)
		op->status = RTE_CRYPTO_OP_STATUS_SUCCESS;

	/* Free session if a session-less crypto op, as in process_ops(). */
	if (op->sess_type == RTE_CRYPTO_OP_SESSIONLESS) {
		memset(session, 0, sizeof(struct snow3g_session));
		memset(op->sym->session, 0,
			rte_cryptodev_sym_get_existing_header_session_size(
				op->sym->session));
		rte_mempool_put(qp->sess_mp_priv, session);
		rte_mempool_put(qp->sess_mp, op->sym->session);
		op->sym->session = NULL;
	}

	enqueued_op = rte_ring_enqueue_burst(qp->processed_ops,
			(void **)&op, processed_op, NULL);
	qp->qp_stats.enqueued_count += enqueued_op;
	*accumulated_enqueued_ops += enqueued_op;

	return enqueued_op;
}

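/*
 * Enqueue path: ops that share a session are collected into batches of up to
 * SNOW3G_MAX_BURST and processed together; an op with a bit-level cipher
 * offset/length first flushes the pending batch and is then handled on its
 * own. Processed ops are placed on qp->processed_ops for the dequeue side.
 */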
static uint16_t
snow3g_pmd_enqueue_burst(void *queue_pair, struct rte_crypto_op **ops,
		uint16_t nb_ops)
{
	struct rte_crypto_op *c_ops[SNOW3G_MAX_BURST];
	struct rte_crypto_op *curr_c_op;

	struct snow3g_session *prev_sess = NULL, *curr_sess = NULL;
	struct snow3g_qp *qp = queue_pair;
	unsigned i;
	uint8_t burst_size = 0;
	uint16_t enqueued_ops = 0;
	uint8_t processed_ops;

	for (i = 0; i < nb_ops; i++) {
		curr_c_op = ops[i];

		/* Set status as enqueued (not processed yet) by default. */
		curr_c_op->status = RTE_CRYPTO_OP_STATUS_NOT_PROCESSED;

		curr_sess = snow3g_get_session(qp, curr_c_op);
		if (unlikely(curr_sess == NULL ||
				curr_sess->op == SNOW3G_OP_NOT_SUPPORTED)) {
			curr_c_op->status =
					RTE_CRYPTO_OP_STATUS_INVALID_SESSION;
			break;
		}

		/* If length/offset is at bit-level, process this buffer alone. */
		if (((curr_c_op->sym->cipher.data.length % BYTE_LEN) != 0)
				|| ((curr_c_op->sym->cipher.data.offset
					% BYTE_LEN) != 0)) {
			/* Process the ops of the previous session. */
			if (prev_sess != NULL) {
				processed_ops = process_ops(c_ops, prev_sess,
						qp, burst_size, &enqueued_ops);
				if (processed_ops < burst_size) {
					burst_size = 0;
					break;
				}

				burst_size = 0;
				prev_sess = NULL;
			}

			processed_ops = process_op_bit(curr_c_op, curr_sess,
							qp, &enqueued_ops);
			if (processed_ops != 1)
				break;

			continue;
		}

		/* Batch ops that share the same session. */
		if (prev_sess == NULL) {
			prev_sess = curr_sess;
			c_ops[burst_size++] = curr_c_op;
		} else if (curr_sess == prev_sess) {
			c_ops[burst_size++] = curr_c_op;
			/*
			 * When there are enough ops to process in a batch,
			 * process them, and start a new batch.
			 */
			if (burst_size == SNOW3G_MAX_BURST) {
				processed_ops = process_ops(c_ops, prev_sess,
						qp, burst_size, &enqueued_ops);
				if (processed_ops < burst_size) {
					burst_size = 0;
					break;
				}

				burst_size = 0;
				prev_sess = NULL;
			}
		} else {
			/*
			 * Different session, process the ops
			 * of the previous session.
			 */
			processed_ops = process_ops(c_ops, prev_sess,
					qp, burst_size, &enqueued_ops);
			if (processed_ops < burst_size) {
				burst_size = 0;
				break;
			}

			burst_size = 0;
			prev_sess = curr_sess;

			c_ops[burst_size++] = curr_c_op;
		}
	}

	if (burst_size != 0) {
		/* Process the crypto ops of the last session. */
		processed_ops = process_ops(c_ops, prev_sess,
				qp, burst_size, &enqueued_ops);
	}

	qp->qp_stats.enqueue_err_count += nb_ops - enqueued_ops;
	return enqueued_ops;
}

static uint16_t
snow3g_pmd_dequeue_burst(void *queue_pair,
		struct rte_crypto_op **c_ops, uint16_t nb_ops)
{
	struct snow3g_qp *qp = queue_pair;

	unsigned nb_dequeued;

	nb_dequeued = rte_ring_dequeue_burst(qp->processed_ops,
			(void **)c_ops, nb_ops, NULL);
	qp->qp_stats.dequeued_count += nb_dequeued;

	return nb_dequeued;
}

static int cryptodev_snow3g_remove(struct rte_vdev_device *vdev);

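/*
 * Device creation: an intel-ipsec-mb manager (MB_MGR) is allocated per device
 * and initialised with the best code path the CPU supports (AVX2 > AVX > SSE);
 * the matching RTE_CRYPTODEV_FF_CPU_* feature flag is advertised alongside it.
 */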
static int
cryptodev_snow3g_create(const char *name,
			struct rte_vdev_device *vdev,
			struct rte_cryptodev_pmd_init_params *init_params)
{
	struct rte_cryptodev *dev;
	struct snow3g_private *internals;
	MB_MGR *mgr;

	dev = rte_cryptodev_pmd_create(name, &vdev->device, init_params);
	if (dev == NULL) {
		SNOW3G_LOG(ERR, "failed to create cryptodev vdev");
		goto init_error;
	}

	dev->driver_id = cryptodev_driver_id;
	dev->dev_ops = rte_snow3g_pmd_ops;

	/* Register RX/TX burst functions for data path. */
	dev->dequeue_burst = snow3g_pmd_dequeue_burst;
	dev->enqueue_burst = snow3g_pmd_enqueue_burst;

	dev->feature_flags = RTE_CRYPTODEV_FF_SYMMETRIC_CRYPTO |
			RTE_CRYPTODEV_FF_SYM_OPERATION_CHAINING;

	mgr = alloc_mb_mgr(0);
	if (mgr == NULL)
		return -ENOMEM;

	if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX2)) {
		dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX2;
		init_mb_mgr_avx2(mgr);
	} else if (rte_cpu_get_flag_enabled(RTE_CPUFLAG_AVX)) {
		dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_AVX;
		init_mb_mgr_avx(mgr);
	} else {
		dev->feature_flags |= RTE_CRYPTODEV_FF_CPU_SSE;
		init_mb_mgr_sse(mgr);
	}

	internals = dev->data->dev_private;
	internals->mgr = mgr;

	internals->max_nb_queue_pairs = init_params->max_nb_queue_pairs;

	return 0;
init_error:
	SNOW3G_LOG(ERR, "driver %s: cryptodev_snow3g_create failed",
			init_params->name);

	cryptodev_snow3g_remove(vdev);
	return -EFAULT;
}

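/*
 * Probe parses the vdev arguments declared below (max_nb_queue_pairs,
 * socket_id). As an illustrative example, assuming the usual "crypto_snow3g"
 * name behind CRYPTODEV_NAME_SNOW3G_PMD, the device could be created with an
 * EAL option such as:
 *
 *   --vdev "crypto_snow3g,max_nb_queue_pairs=2,socket_id=0"
 */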
static int
cryptodev_snow3g_probe(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev_pmd_init_params init_params = {
		"",
		sizeof(struct snow3g_private),
		rte_socket_id(),
		RTE_CRYPTODEV_PMD_DEFAULT_MAX_NB_QUEUE_PAIRS
	};
	const char *name;
	const char *input_args;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;
	input_args = rte_vdev_device_args(vdev);

	rte_cryptodev_pmd_parse_input_args(&init_params, input_args);

	return cryptodev_snow3g_create(name, vdev, &init_params);
}

static int
cryptodev_snow3g_remove(struct rte_vdev_device *vdev)
{
	struct rte_cryptodev *cryptodev;
	const char *name;
	struct snow3g_private *internals;

	name = rte_vdev_device_name(vdev);
	if (name == NULL)
		return -EINVAL;

	cryptodev = rte_cryptodev_pmd_get_named_dev(name);
	if (cryptodev == NULL)
		return -ENODEV;

	internals = cryptodev->data->dev_private;

	free_mb_mgr(internals->mgr);

	return rte_cryptodev_pmd_destroy(cryptodev);
}

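/*
 * Register the PMD as a vdev driver and with the cryptodev framework so that
 * cryptodev_driver_id is assigned; sessions created by this PMD are matched
 * back to it via that id in snow3g_get_session().
 */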
static struct rte_vdev_driver cryptodev_snow3g_pmd_drv = {
	.probe = cryptodev_snow3g_probe,
	.remove = cryptodev_snow3g_remove
};

static struct cryptodev_driver snow3g_crypto_drv;

RTE_PMD_REGISTER_VDEV(CRYPTODEV_NAME_SNOW3G_PMD, cryptodev_snow3g_pmd_drv);
RTE_PMD_REGISTER_ALIAS(CRYPTODEV_NAME_SNOW3G_PMD, cryptodev_snow3g_pmd);
RTE_PMD_REGISTER_PARAM_STRING(CRYPTODEV_NAME_SNOW3G_PMD,
	"max_nb_queue_pairs=<int> "
	"socket_id=<int>");
RTE_PMD_REGISTER_CRYPTO_DRIVER(snow3g_crypto_drv,
		cryptodev_snow3g_pmd_drv.driver, cryptodev_driver_id);

RTE_INIT(snow3g_init_log)
{
	snow3g_logtype_driver = rte_log_register("pmd.crypto.snow3g");
}