dpdk.git: app/test-crypto-perf/cperf_ops.c
/*-
 *   BSD LICENSE
 *
 *   Copyright(c) 2016-2017 Intel Corporation. All rights reserved.
 *
 *   Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions
 *   are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in
 *       the documentation and/or other materials provided with the
 *       distribution.
 *     * Neither the name of Intel Corporation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 *   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 *   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 *   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 *   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 *   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 *   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <rte_cryptodev.h>

#include "cperf_ops.h"
#include "cperf_test_vectors.h"

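/*
 * Populate ops for a NULL-cipher session: attach the session, point each op
 * at its source/destination mbuf and set the cipher data length/offset.
 * The NULL algorithm needs no key or IV, so the test vector is unused.
 */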
static int
cperf_set_ops_null_cipher(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset = 0;
	}

	return 0;
}

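/*
 * Populate ops for a NULL-auth session: same mbuf/session setup as the
 * NULL-cipher case, but the auth data length/offset fields are filled in
 * instead of the cipher ones.
 */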
static int
cperf_set_ops_null_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector __rte_unused,
		uint16_t iv_offset __rte_unused)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* auth parameters */
		sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = 0;
	}

	return 0;
}

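/*
 * Populate ops for cipher-only sessions. SNOW3G/KASUMI/ZUC ciphers take the
 * data length in bits, hence the << 3. For verify-type tests the cipher IV
 * from the test vector is copied into each op at iv_offset.
 */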
static int
cperf_set_ops_cipher(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
		}
	}

	return 0;
}

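/*
 * Populate ops for auth-only sessions. For verify operations the digest
 * pointer comes from the test vector; for generate operations the digest is
 * written right after the payload, walking chained mbufs to find the segment
 * that holds that offset. Wireless auth algorithms take lengths in bits.
 */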
static int
cperf_set_ops_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		if (test_vector->auth_iv.length) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
								uint8_t *,
								iv_offset);
			memcpy(iv_ptr, test_vector->auth_iv.data,
					test_vector->auth_iv.length);
		}

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
			sym_op->auth.digest.length = options->auth_digest_sz;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				tbuf = bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
			sym_op->auth.digest.length = options->auth_digest_sz;
			sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
			sym_op->auth.aad.data = test_vector->aad.data;
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		if (test_vector->auth_iv.length) {
			for (i = 0; i < nb_ops; i++) {
				uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
						uint8_t *, iv_offset);

				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}

	return 0;
}

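/*
 * Populate ops for chained cipher+auth sessions: a combination of the cipher
 * and auth setup above. For verify-type tests the cipher IV (and, if present,
 * the auth IV) is copied into each op after the crypto operation.
 */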
static int
cperf_set_ops_cipher_auth(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_SNOW3G_UEA2 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_KASUMI_F8 ||
				options->cipher_algo == RTE_CRYPTO_CIPHER_ZUC_EEA3)
			sym_op->cipher.data.length = options->test_buffer_size << 3;
		else
			sym_op->cipher.data.length = options->test_buffer_size;

		sym_op->cipher.data.offset = 0;

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
			sym_op->auth.digest.length = options->auth_digest_sz;
		} else {
			uint32_t offset = options->test_buffer_size;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				tbuf = bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
			sym_op->auth.digest.length = options->auth_digest_sz;
			sym_op->auth.aad.phys_addr = test_vector->aad.phys_addr;
			sym_op->auth.aad.data = test_vector->aad.data;
		}

		if (options->auth_algo == RTE_CRYPTO_AUTH_SNOW3G_UIA2 ||
				options->auth_algo == RTE_CRYPTO_AUTH_KASUMI_F9 ||
				options->auth_algo == RTE_CRYPTO_AUTH_ZUC_EIA3)
			sym_op->auth.data.length = options->test_buffer_size << 3;
		else
			sym_op->auth.data.length = options->test_buffer_size;

		sym_op->auth.data.offset = 0;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
			if (test_vector->auth_iv.length) {
				/*
				 * Copy the auth IV right after the cipher IV,
				 * which itself sits after the crypto operation.
				 */
				iv_ptr += test_vector->cipher_iv.length;
				memcpy(iv_ptr, test_vector->auth_iv.data,
						test_vector->auth_iv.length);
			}
		}
	}

	return 0;
}

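/*
 * Populate ops for AEAD (AES-GCM) sessions. The AAD sits at the start of the
 * source mbuf and the cipher data starts after it, aligned to 16 bytes; the
 * digest is either taken from the test vector (verify) or written right after
 * the ciphertext (generate).
 */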
static int
cperf_set_ops_aead(struct rte_crypto_op **ops,
		struct rte_mbuf **bufs_in, struct rte_mbuf **bufs_out,
		uint16_t nb_ops, struct rte_cryptodev_sym_session *sess,
		const struct cperf_options *options,
		const struct cperf_test_vector *test_vector,
		uint16_t iv_offset)
{
	uint16_t i;

	for (i = 0; i < nb_ops; i++) {
		struct rte_crypto_sym_op *sym_op = ops[i]->sym;

		rte_crypto_op_attach_sym_session(ops[i], sess);

		sym_op->m_src = bufs_in[i];
		sym_op->m_dst = bufs_out[i];

		/* cipher parameters */
		sym_op->cipher.data.length = options->test_buffer_size;
		sym_op->cipher.data.offset =
				RTE_ALIGN_CEIL(options->auth_aad_sz, 16);

		sym_op->auth.aad.data = rte_pktmbuf_mtod(bufs_in[i], uint8_t *);
		sym_op->auth.aad.phys_addr = rte_pktmbuf_mtophys(bufs_in[i]);

		/* authentication parameters */
		if (options->auth_op == RTE_CRYPTO_AUTH_OP_VERIFY) {
			sym_op->auth.digest.data = test_vector->digest.data;
			sym_op->auth.digest.phys_addr =
					test_vector->digest.phys_addr;
			sym_op->auth.digest.length = options->auth_digest_sz;
		} else {
			uint32_t offset = sym_op->cipher.data.length +
						sym_op->cipher.data.offset;
			struct rte_mbuf *buf, *tbuf;

			if (options->out_of_place) {
				buf = bufs_out[i];
			} else {
				tbuf = bufs_in[i];
				while ((tbuf->next != NULL) &&
						(offset >= tbuf->data_len)) {
					offset -= tbuf->data_len;
					tbuf = tbuf->next;
				}
				buf = tbuf;
			}

			sym_op->auth.digest.data = rte_pktmbuf_mtod_offset(buf,
					uint8_t *, offset);
			sym_op->auth.digest.phys_addr =
					rte_pktmbuf_mtophys_offset(buf, offset);
			sym_op->auth.digest.length = options->auth_digest_sz;
		}

		sym_op->auth.data.length = options->test_buffer_size;
		sym_op->auth.data.offset = options->auth_aad_sz;
	}

	if (options->test == CPERF_TEST_TYPE_VERIFY) {
		for (i = 0; i < nb_ops; i++) {
			uint8_t *iv_ptr = rte_crypto_op_ctod_offset(ops[i],
					uint8_t *, iv_offset);

			memcpy(iv_ptr, test_vector->cipher_iv.data,
					test_vector->cipher_iv.length);
		}
	}

	return 0;
}

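/*
 * Build the cipher/auth transforms from the test options and vector and
 * create the device session. For chained operations the xform order follows
 * the op type, except for AES-GCM, where it follows the cipher direction
 * (cipher-first on encrypt, auth-first on decrypt).
 */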
static struct rte_cryptodev_sym_session *
cperf_create_session(uint8_t dev_id,
	const struct cperf_options *options,
	const struct cperf_test_vector *test_vector,
	uint16_t iv_offset)
{
	struct rte_crypto_sym_xform cipher_xform;
	struct rte_crypto_sym_xform auth_xform;
	struct rte_cryptodev_sym_session *sess = NULL;

	/*
	 * cipher only
	 */
	if (options->op_type == CPERF_CIPHER_ONLY) {
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher algorithm other than NULL */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &cipher_xform);
	/*
	 * auth only
	 */
	} else if (options->op_type == CPERF_AUTH_ONLY) {
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth algorithm other than NULL */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length =
					options->auth_digest_sz;
			auth_xform.auth.add_auth_data_length =
					options->auth_aad_sz;
			auth_xform.auth.key.length =
					test_vector->auth_key.length;
			auth_xform.auth.key.data = test_vector->auth_key.data;
			auth_xform.auth.iv.length =
					test_vector->auth_iv.length;
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.add_auth_data_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}
		/* create crypto session */
		sess = rte_cryptodev_sym_session_create(dev_id, &auth_xform);
	/*
	 * cipher and auth
	 */
	} else if (options->op_type == CPERF_CIPHER_THEN_AUTH
			|| options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_AEAD) {

		/*
		 * cipher
		 */
		cipher_xform.type = RTE_CRYPTO_SYM_XFORM_CIPHER;
		cipher_xform.next = NULL;
		cipher_xform.cipher.algo = options->cipher_algo;
		cipher_xform.cipher.op = options->cipher_op;
		cipher_xform.cipher.iv.offset = iv_offset;

		/* cipher algorithm other than NULL */
		if (options->cipher_algo != RTE_CRYPTO_CIPHER_NULL) {
			cipher_xform.cipher.key.data =
					test_vector->cipher_key.data;
			cipher_xform.cipher.key.length =
					test_vector->cipher_key.length;
			cipher_xform.cipher.iv.length =
					test_vector->cipher_iv.length;
		} else {
			cipher_xform.cipher.key.data = NULL;
			cipher_xform.cipher.key.length = 0;
			cipher_xform.cipher.iv.length = 0;
		}

		/*
		 * auth
		 */
		auth_xform.type = RTE_CRYPTO_SYM_XFORM_AUTH;
		auth_xform.next = NULL;
		auth_xform.auth.algo = options->auth_algo;
		auth_xform.auth.op = options->auth_op;

		/* auth algorithm other than NULL */
		if (options->auth_algo != RTE_CRYPTO_AUTH_NULL) {
			auth_xform.auth.digest_length = options->auth_digest_sz;
			auth_xform.auth.add_auth_data_length =
					options->auth_aad_sz;
			/* auth options for AES-GCM */
			if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
				options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM) {
				auth_xform.auth.key.length = 0;
				auth_xform.auth.key.data = NULL;
				auth_xform.auth.iv.length = 0;
			} else { /* auth options for other algorithms */
				auth_xform.auth.key.length =
					test_vector->auth_key.length;
				auth_xform.auth.key.data =
						test_vector->auth_key.data;
				auth_xform.auth.iv.length =
						test_vector->auth_iv.length;
			}
		} else {
			auth_xform.auth.digest_length = 0;
			auth_xform.auth.add_auth_data_length = 0;
			auth_xform.auth.key.length = 0;
			auth_xform.auth.key.data = NULL;
			auth_xform.auth.iv.length = 0;
		}

		/* create crypto session for AES-GCM */
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM) {
			if (options->cipher_op ==
					RTE_CRYPTO_CIPHER_OP_ENCRYPT) {
				cipher_xform.next = &auth_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&cipher_xform);
			} else { /* decrypt */
				auth_xform.next = &cipher_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
					&auth_xform);
			}
		} else { /* create crypto session for other algorithms */
			/* cipher then auth */
			if (options->op_type == CPERF_CIPHER_THEN_AUTH) {
				cipher_xform.next = &auth_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
						&cipher_xform);
			} else { /* auth then cipher */
				auth_xform.next = &cipher_xform;
				/* create crypto session */
				sess = rte_cryptodev_sym_session_create(dev_id,
						&auth_xform);
			}
		}
	}
	return sess;
}

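/*
 * Select the session-creation and op-population callbacks that match the
 * requested operation type and algorithms.
 */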
int
cperf_get_op_functions(const struct cperf_options *options,
		struct cperf_op_fns *op_fns)
{
	memset(op_fns, 0, sizeof(struct cperf_op_fns));

	op_fns->sess_create = cperf_create_session;

	if (options->op_type == CPERF_AEAD
			|| options->op_type == CPERF_AUTH_THEN_CIPHER
			|| options->op_type == CPERF_CIPHER_THEN_AUTH) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_AES_GCM &&
				options->auth_algo == RTE_CRYPTO_AUTH_AES_GCM)
			op_fns->populate_ops = cperf_set_ops_aead;
		else
			op_fns->populate_ops = cperf_set_ops_cipher_auth;
		return 0;
	}
	if (options->op_type == CPERF_AUTH_ONLY) {
		if (options->auth_algo == RTE_CRYPTO_AUTH_NULL)
			op_fns->populate_ops = cperf_set_ops_null_auth;
		else
			op_fns->populate_ops = cperf_set_ops_auth;
		return 0;
	}
	if (options->op_type == CPERF_CIPHER_ONLY) {
		if (options->cipher_algo == RTE_CRYPTO_CIPHER_NULL)
			op_fns->populate_ops = cperf_set_ops_null_cipher;
		else
			op_fns->populate_ops = cperf_set_ops_cipher;
		return 0;
	}

	return -1;
}