X-Git-Url: http://git.droids-corp.org/?a=blobdiff_plain;f=lib%2Flibrte_cryptodev%2Frte_crypto_sym.h;h=bc356f6ff0746c4afaaac99166ddc31ac31dde55;hb=7e9165b1beb77554916686cb4ff4d16c4e995b74;hp=c80e90ee83768000719d884bfb2c4a37fa208314;hpb=a76e869f663a8ab0fe1d5a8a5bbd4caf522b0738;p=dpdk.git

diff --git a/lib/librte_cryptodev/rte_crypto_sym.h b/lib/librte_cryptodev/rte_crypto_sym.h
index c80e90ee83..bc356f6ff0 100644
--- a/lib/librte_cryptodev/rte_crypto_sym.h
+++ b/lib/librte_cryptodev/rte_crypto_sym.h
@@ -114,8 +114,8 @@ struct rte_crypto_cipher_xform {
 	/**< Cipher algorithm */
 
 	struct {
-		uint8_t *data;	/**< pointer to key data */
-		uint16_t length;/**< key length in bytes */
+		const uint8_t *data;	/**< pointer to key data */
+		uint16_t length;	/**< key length in bytes */
 	} key;
 	/**< Cipher key
 	 *
@@ -152,11 +152,6 @@ struct rte_crypto_cipher_xform {
 		 *
 		 * - For block ciphers in CTR mode, this is the counter.
 		 *
-		 * - For GCM mode, this is either the IV (if the length
-		 *   is 96 bits) or J0 (for other sizes), where J0 is as
-		 *   defined by NIST SP800-38D. Regardless of the IV
-		 *   length, a full 16 bytes needs to be allocated.
-		 *
 		 * - For CCM mode, the first byte is reserved, and the
 		 *   nonce should be written starting at &iv[1] (to allow
 		 *   space for the implementation to write in the flags
@@ -184,9 +179,6 @@ struct rte_crypto_cipher_xform {
 		 *   of the counter (which must be the same as the block
 		 *   length of the cipher).
 		 *
-		 * - For GCM mode, this is either 12 (for 96-bit IVs)
-		 *   or 16, in which case data points to J0.
-		 *
 		 * - For CCM mode, this is the length of the nonce,
 		 *   which can be in the range 7 to 13 inclusive.
 		 */
@@ -290,8 +282,8 @@ struct rte_crypto_auth_xform {
 	/**< Authentication algorithm selection */
 
 	struct {
-		uint8_t *data;	/**< pointer to key data */
-		uint16_t length;/**< key length in bytes */
+		const uint8_t *data;	/**< pointer to key data */
+		uint16_t length;	/**< key length in bytes */
 	} key;
 	/**< Authentication key data.
 	 * The authentication key length MUST be less than or equal to the
@@ -306,9 +298,10 @@ struct rte_crypto_auth_xform {
 		 * specified as number of bytes from start of crypto
 		 * operation (rte_crypto_op).
 		 *
-		 * - For SNOW 3G in UIA2 mode, for ZUC in EIA3 mode and
-		 *   for AES-GMAC, this is the authentication
-		 *   Initialisation Vector (IV) value.
+		 * - For SNOW 3G in UIA2 mode, for ZUC in EIA3 mode
+		 *   this is the authentication Initialisation Vector
+		 *   (IV) value. For AES-GMAC IV description please refer
+		 *   to the field `length` in iv struct.
 		 *
 		 * - For KASUMI in F9 mode and other authentication
 		 *   algorithms, this field is not used.
@@ -325,6 +318,14 @@ struct rte_crypto_auth_xform {
 		 * - For KASUMI in F9 mode and other authentication
 		 *   algorithms, this field is not used.
 		 *
+		 * - For GMAC mode, this is either:
+		 *    1) Number greater or equal to one, which means that IV
+		 *    is used and J0 will be computed internally, a minimum
+		 *    of 16 bytes must be allocated.
+		 *    2) Zero, in which case data points to J0. In this case
+		 *    16 bytes of J0 should be passed where J0 is defined
+		 *    by NIST SP800-38D.
+		 *
 		 */
 	} iv;	/**< Initialisation vector parameters */
 
@@ -347,6 +348,8 @@ enum rte_crypto_aead_algorithm {
 	/**< AES algorithm in CCM mode. */
 	RTE_CRYPTO_AEAD_AES_GCM,
 	/**< AES algorithm in GCM mode. */
+	RTE_CRYPTO_AEAD_CHACHA20_POLY1305,
+	/**< Chacha20 cipher with poly1305 authenticator */
 	RTE_CRYPTO_AEAD_LIST_END
 };
 
@@ -373,8 +376,8 @@ struct rte_crypto_aead_xform {
 	/**< AEAD algorithm selection */
 
 	struct {
-		uint8_t *data;	/**< pointer to key data */
-		uint16_t length;/**< key length in bytes */
+		const uint8_t *data;	/**< pointer to key data */
+		uint16_t length;	/**< key length in bytes */
 	} key;
 
 	struct {
@@ -383,11 +386,6 @@ struct rte_crypto_aead_xform {
 		 * specified as number of bytes from start of crypto
 		 * operation (rte_crypto_op).
 		 *
-		 * - For GCM mode, this is either the IV (if the length
-		 *   is 96 bits) or J0 (for other sizes), where J0 is as
-		 *   defined by NIST SP800-38D. Regardless of the IV
-		 *   length, a full 16 bytes needs to be allocated.
-		 *
 		 * - For CCM mode, the first byte is reserved, and the
 		 *   nonce should be written starting at &iv[1] (to allow
 		 *   space for the implementation to write in the flags
@@ -395,17 +393,29 @@ struct rte_crypto_aead_xform {
 		 *   be allocated, even though the length field will
 		 *   have a value less than this.
 		 *
+		 * - For Chacha20-Poly1305 it is 96-bit nonce.
+		 *   PMD sets initial counter for Poly1305 key generation
+		 *   part to 0 and for Chacha20 encryption to 1 as per
+		 *   rfc8439 2.8. AEAD construction.
+		 *
 		 * For optimum performance, the data pointed to SHOULD
 		 * be 8-byte aligned.
 		 */
 		uint16_t length;
 		/**< Length of valid IV data.
 		 *
-		 * - For GCM mode, this is either 12 (for 96-bit IVs)
-		 *   or 16, in which case data points to J0.
+		 * - For GCM mode, this is either:
+		 *    1) Number greater or equal to one, which means that IV
+		 *    is used and J0 will be computed internally, a minimum
+		 *    of 16 bytes must be allocated.
+		 *    2) Zero, in which case data points to J0. In this case
+		 *    16 bytes of J0 should be passed where J0 is defined
+		 *    by NIST SP800-38D.
 		 *
 		 * - For CCM mode, this is the length of the nonce,
 		 *   which can be in the range 7 to 13 inclusive.
+		 *
+		 * - For Chacha20-Poly1305 this field is always 12.
 		 */
 	} iv;	/**< Initialisation vector parameters */
 
@@ -589,7 +599,9 @@ struct rte_crypto_sym_op {
 				 * For SNOW 3G @ RTE_CRYPTO_CIPHER_SNOW3G_UEA2,
 				 * KASUMI @ RTE_CRYPTO_CIPHER_KASUMI_F8
 				 * and ZUC @ RTE_CRYPTO_CIPHER_ZUC_EEA3,
-				 * this field should be in bits.
+				 * this field should be in bits. For
+				 * digest-encrypted cases this must be
+				 * an 8-bit multiple.
 				 */
 				uint32_t length;
 				 /**< The message length, in bytes, of the
@@ -603,7 +615,9 @@ struct rte_crypto_sym_op {
 				 * For SNOW 3G @ RTE_CRYPTO_AUTH_SNOW3G_UEA2,
 				 * KASUMI @ RTE_CRYPTO_CIPHER_KASUMI_F8
 				 * and ZUC @ RTE_CRYPTO_CIPHER_ZUC_EEA3,
-				 * this field should be in bits.
+				 * this field should be in bits. For
+				 * digest-encrypted cases this must be
+				 * an 8-bit multiple.
 				 */
 			} data; /**< Data offsets and length for ciphering */
 		} cipher;
@@ -619,7 +633,9 @@ struct rte_crypto_sym_op {
 				 * For SNOW 3G @ RTE_CRYPTO_AUTH_SNOW3G_UIA2,
 				 * KASUMI @ RTE_CRYPTO_AUTH_KASUMI_F9
 				 * and ZUC @ RTE_CRYPTO_AUTH_ZUC_EIA3,
-				 * this field should be in bits.
+				 * this field should be in bits. For
+				 * digest-encrypted cases this must be
+				 * an 8-bit multiple.
 				 *
 				 * @note
 				 * For KASUMI @ RTE_CRYPTO_AUTH_KASUMI_F9,
@@ -634,7 +650,9 @@ struct rte_crypto_sym_op {
 				 * For SNOW 3G @ RTE_CRYPTO_AUTH_SNOW3G_UIA2,
 				 * KASUMI @ RTE_CRYPTO_AUTH_KASUMI_F9
 				 * and ZUC @ RTE_CRYPTO_AUTH_ZUC_EIA3,
-				 * this field should be in bits.
+				 * this field should be in bits. For
+				 * digest-encrypted cases this must be
+				 * an 8-bit multiple.
 				 *
 				 * @note
 				 * For KASUMI @ RTE_CRYPTO_AUTH_KASUMI_F9,
@@ -665,6 +683,57 @@ struct rte_crypto_sym_op {
 				 * For digest generation, the digest result
 				 * will overwrite any data at this location.
 				 *
+				 * @note
+				 * Digest-encrypted case.
+				 * Digest can be generated, appended to
+				 * the end of raw data and encrypted
+				 * together using chained digest
+				 * generation
+				 * (@ref RTE_CRYPTO_AUTH_OP_GENERATE)
+				 * and encryption
+				 * (@ref RTE_CRYPTO_CIPHER_OP_ENCRYPT)
+				 * xforms. Similarly, authentication
+				 * of the raw data against appended,
+				 * decrypted digest, can be performed
+				 * using decryption
+				 * (@ref RTE_CRYPTO_CIPHER_OP_DECRYPT)
+				 * and digest verification
+				 * (@ref RTE_CRYPTO_AUTH_OP_VERIFY)
+				 * chained xforms.
+				 * To perform those operations, a few
+				 * additional conditions must be met:
+				 * - caller must allocate at least
+				 * digest_length of memory at the end of
+				 * source and (in case of out-of-place
+				 * operations) destination buffer; those
+				 * buffers can be linear or split using
+				 * scatter-gather lists,
+				 * - digest data pointer must point to
+				 * the end of source or (in case of
+				 * out-of-place operations) destination
+				 * data, which is pointer to the
+				 * data buffer + auth.data.offset +
+				 * auth.data.length,
+				 * - cipher.data.offset +
+				 * cipher.data.length must be greater
+				 * than auth.data.offset +
+				 * auth.data.length and is typically
+				 * equal to auth.data.offset +
+				 * auth.data.length + digest_length.
+				 * - for wireless algorithms, i.e.
+				 * SNOW 3G, KASUMI and ZUC, as the
+				 * cipher.data.length,
+				 * cipher.data.offset,
+				 * auth.data.length and
+				 * auth.data.offset are in bits, they
+				 * must be 8-bit multiples.
+				 *
+				 * Note, that for security reasons, it
+				 * is PMDs' responsibility to not
+				 * leave an unencrypted digest in any
+				 * buffer after performing auth-cipher
+				 * operations.
+				 *
 				 */
 				rte_iova_t phys_addr;
 				/**< Physical address of digest */
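To make the new IV conventions and the digest-encrypted chaining rules concrete, below is a minimal sketch in C of how the transform structures documented by this patch could be filled in. It is not part of the patch or the DPDK API: the helper names (fill_gcm_xform, fill_chachapoly_xform, fill_digest_encrypted_xforms), the iv_off parameter (the IV offset from the start of the rte_crypto_op), and the SHA256-HMAC/AES-CBC pairing in the chained example are illustrative assumptions; the per-operation offsets, lengths and digest pointer are still set later in rte_crypto_sym_op.

#include <stdbool.h>
#include <string.h>

#include <rte_crypto_sym.h>

/* AES-GCM: iv.length >= 1 means a real IV is supplied and the PMD derives
 * J0 internally; iv.length == 0 means the 16 bytes placed at iv.offset in
 * the crypto op already hold a precomputed J0 (NIST SP800-38D).  At least
 * 16 bytes should be reserved at iv.offset in either case.
 */
static void
fill_gcm_xform(struct rte_crypto_sym_xform *xf, const uint8_t *key,
	       uint16_t key_len, uint16_t iv_off, bool use_j0)
{
	memset(xf, 0, sizeof(*xf));
	xf->type = RTE_CRYPTO_SYM_XFORM_AEAD;
	xf->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
	xf->aead.algo = RTE_CRYPTO_AEAD_AES_GCM;
	xf->aead.key.data = key;		/* key.data is const after this patch */
	xf->aead.key.length = key_len;
	xf->aead.iv.offset = iv_off;		/* bytes from start of rte_crypto_op */
	xf->aead.iv.length = use_j0 ? 0 : 12;	/* 0 selects the "data points to J0" case */
	xf->aead.digest_length = 16;
}

/* Chacha20-Poly1305: the nonce is always 96 bits, so iv.length is 12;
 * the PMD sets the rfc8439 initial counters internally.
 */
static void
fill_chachapoly_xform(struct rte_crypto_sym_xform *xf,
		      const uint8_t key[32], uint16_t iv_off)
{
	memset(xf, 0, sizeof(*xf));
	xf->type = RTE_CRYPTO_SYM_XFORM_AEAD;
	xf->aead.op = RTE_CRYPTO_AEAD_OP_ENCRYPT;
	xf->aead.algo = RTE_CRYPTO_AEAD_CHACHA20_POLY1305;
	xf->aead.key.data = key;
	xf->aead.key.length = 32;
	xf->aead.iv.offset = iv_off;
	xf->aead.iv.length = 12;
	xf->aead.digest_length = 16;		/* Poly1305 tag */
}

/* Digest-encrypted case: chain digest generation with encryption.  The
 * algorithm pairing here is only an example; the per-op requirement is
 * that the cipher region covers the auth region plus the appended digest,
 * i.e. cipher.data.offset + cipher.data.length is typically
 * auth.data.offset + auth.data.length + digest_length.
 */
static void
fill_digest_encrypted_xforms(struct rte_crypto_sym_xform *auth_xf,
			     struct rte_crypto_sym_xform *cipher_xf,
			     const uint8_t *auth_key, uint16_t auth_key_len,
			     const uint8_t *cipher_key, uint16_t cipher_key_len,
			     uint16_t iv_off)
{
	memset(auth_xf, 0, sizeof(*auth_xf));
	auth_xf->type = RTE_CRYPTO_SYM_XFORM_AUTH;
	auth_xf->next = cipher_xf;			/* generate digest, then encrypt */
	auth_xf->auth.op = RTE_CRYPTO_AUTH_OP_GENERATE;
	auth_xf->auth.algo = RTE_CRYPTO_AUTH_SHA256_HMAC;
	auth_xf->auth.key.data = auth_key;
	auth_xf->auth.key.length = auth_key_len;
	auth_xf->auth.digest_length = 32;

	memset(cipher_xf, 0, sizeof(*cipher_xf));
	cipher_xf->type = RTE_CRYPTO_SYM_XFORM_CIPHER;
	cipher_xf->next = NULL;
	cipher_xf->cipher.op = RTE_CRYPTO_CIPHER_OP_ENCRYPT;
	cipher_xf->cipher.algo = RTE_CRYPTO_CIPHER_AES_CBC;
	cipher_xf->cipher.key.data = cipher_key;
	cipher_xf->cipher.key.length = cipher_key_len;
	cipher_xf->cipher.iv.offset = iv_off;
	cipher_xf->cipher.iv.length = 16;		/* AES-CBC block-size IV */
}

With the chained pair, a session built from auth_xf generates the digest, appends it after the authenticated region and encrypts both together, subject to the buffer allocation and offset/length conditions listed in the digest comment of the patch above.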