1 From 317e3d9870097e6b115dd8c9a13ccb5e5ca76f2e Mon Sep 17 00:00:00 2001
2 From: Cristian Stoica <cristian.stoica@freescale.com>
3 Date: Mon, 16 Jun 2014 14:06:21 +0300
4 Subject: [PATCH 14/48] cryptodev: add support for aes-gcm algorithm offloading
6 Signed-off-by: Cristian Stoica <cristian.stoica@freescale.com>
9 crypto/engine/eng_cryptodev.c | 236 +++++++++++++++++++++++++++++++++++++++++-
10 2 files changed, 240 insertions(+), 2 deletions(-)
12 diff --git a/apps/speed.c b/apps/speed.c
13 index 95adcc1..e5e609b 100644
20 -# define BUFSIZE ((long)1024*8+1)
21 +/* The buffer overhead allows GCM tag at the end of the encrypted data. This
22 + avoids buffer overflows from cryptodev since Linux kernel GCM
23 +   implementation always adds the tag - unlike e_aes.c:aes_gcm_cipher()
25 +#define BUFSIZE ((long)1024*8 + EVP_GCM_TLS_TAG_LEN)
26 static volatile int run = 0;
29 diff --git a/crypto/engine/eng_cryptodev.c b/crypto/engine/eng_cryptodev.c
30 index 4929ae6..d2cdca0 100644
31 --- a/crypto/engine/eng_cryptodev.c
32 +++ b/crypto/engine/eng_cryptodev.c
34 * Copyright (c) 2002 Bob Beck <beck@openbsd.org>
35 * Copyright (c) 2002 Theo de Raadt
36 * Copyright (c) 2002 Markus Friedl
37 + * Copyright (c) 2013-2014 Freescale Semiconductor, Inc.
38 * All rights reserved.
40 * Redistribution and use in source and binary forms, with or without
41 @@ -77,8 +78,10 @@ struct dev_crypto_state {
42 struct session_op d_sess;
45 - unsigned int aad_len;
50 # ifdef USE_CRYPTODEV_DIGESTS
51 char dummy_mac_key[HASH_MAX_LEN];
52 unsigned char digest_res[HASH_MAX_LEN];
53 @@ -287,6 +290,9 @@ static struct {
54 CRYPTO_TLS10_AES_CBC_HMAC_SHA1, NID_aes_256_cbc_hmac_sha1, 16, 32, 20
57 + CRYPTO_AES_GCM, NID_aes_128_gcm, 16, 16, 0
63 @@ -325,6 +331,22 @@ static struct {
67 +/* increment counter (64-bit int) by 1 */
68 +static void ctr64_inc(unsigned char *counter)
84 * Return a fd if /dev/crypto seems usable, 0 otherwise.
86 @@ -807,6 +829,199 @@ static int cryptodev_cbc_hmac_sha1_ctrl(EVP_CIPHER_CTX *ctx, int type,
90 +static int cryptodev_init_gcm_key(EVP_CIPHER_CTX *ctx,
91 + const unsigned char *key,
92 + const unsigned char *iv, int enc)
94 + struct dev_crypto_state *state = ctx->cipher_data;
95 + struct session_op *sess = &state->d_sess;
101 + memcpy(ctx->iv, iv, ctx->cipher->iv_len);
103 + for (i = 0; ciphers[i].id; i++)
104 + if (ctx->cipher->nid == ciphers[i].nid &&
105 + ctx->cipher->iv_len <= ciphers[i].ivmax &&
106 + ctx->key_len == ciphers[i].keylen) {
107 + cipher = ciphers[i].id;
111 + if (!ciphers[i].id) {
116 + memset(sess, 0, sizeof(struct session_op));
118 + if ((state->d_fd = get_dev_crypto()) < 0)
121 + sess->key = (unsigned char *)key;
122 + sess->keylen = ctx->key_len;
123 + sess->cipher = cipher;
125 + if (ioctl(state->d_fd, CIOCGSESSION, sess) == -1) {
126 + put_dev_crypto(state->d_fd);
133 +static int cryptodev_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
134 + const unsigned char *in, size_t len)
136 + struct crypt_auth_op cryp = { 0 };
137 + struct dev_crypto_state *state = ctx->cipher_data;
138 + struct session_op *sess = &state->d_sess;
141 + if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
142 + EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
143 + EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
146 + in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
147 + out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
148 + len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
150 + if (ctx->encrypt) {
151 + len -= EVP_GCM_TLS_TAG_LEN;
153 + cryp.ses = sess->ses;
155 + cryp.src = (unsigned char *)in;
157 + cryp.auth_src = state->aad;
158 + cryp.auth_len = state->aad_len;
160 + cryp.op = ctx->encrypt ? COP_ENCRYPT : COP_DECRYPT;
162 + if (ioctl(state->d_fd, CIOCAUTHCRYPT, &cryp) == -1) {
167 + ctr64_inc(state->iv + state->ivlen - 8);
169 + rv = len - EVP_GCM_TLS_TAG_LEN;
174 +static int cryptodev_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
175 + const unsigned char *in, size_t len)
177 + struct crypt_auth_op cryp;
178 + struct dev_crypto_state *state = ctx->cipher_data;
179 + struct session_op *sess = &state->d_sess;
181 + if (state->d_fd < 0)
184 + if ((len % ctx->cipher->block_size) != 0)
187 + if (state->aad_len >= 0)
188 + return cryptodev_gcm_tls_cipher(ctx, out, in, len);
190 + memset(&cryp, 0, sizeof(cryp));
192 + cryp.ses = sess->ses;
194 + cryp.src = (unsigned char *)in;
196 + cryp.auth_src = NULL;
199 + cryp.op = ctx->encrypt ? COP_ENCRYPT : COP_DECRYPT;
201 + if (ioctl(state->d_fd, CIOCAUTHCRYPT, &cryp) == -1) {
208 +static int cryptodev_gcm_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
211 + struct dev_crypto_state *state = ctx->cipher_data;
213 + case EVP_CTRL_INIT:
215 + state->ivlen = ctx->cipher->iv_len;
216 + state->iv = ctx->iv;
217 + state->aad_len = -1;
220 + case EVP_CTRL_GCM_SET_IV_FIXED:
222 + /* Special case: -1 length restores whole IV */
224 + memcpy(state->iv, ptr, state->ivlen);
228 + * Fixed field must be at least 4 bytes and invocation field at
231 + if ((arg < 4) || (state->ivlen - arg) < 8)
234 + memcpy(state->iv, ptr, arg);
235 + if (ctx->encrypt &&
236 + RAND_bytes(state->iv + arg, state->ivlen - arg) <= 0)
240 + case EVP_CTRL_AEAD_TLS1_AAD:
246 + memcpy(ctx->buf, ptr, arg);
247 + len = ctx->buf[arg - 2] << 8 | ctx->buf[arg - 1];
249 + /* Correct length for explicit IV */
250 + len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
252 + /* If decrypting correct for tag too */
254 + len -= EVP_GCM_TLS_TAG_LEN;
256 + ctx->buf[arg - 2] = len >> 8;
257 + ctx->buf[arg - 1] = len & 0xff;
259 + state->aad = ctx->buf;
260 + state->aad_len = arg;
263 + /* Extra padding: tag appended to record */
264 + return EVP_GCM_TLS_TAG_LEN;
266 + case EVP_CTRL_GCM_SET_IV_INV:
270 + memcpy(state->iv + state->ivlen - arg, ptr, arg);
273 + case EVP_CTRL_GCM_IV_GEN:
274 + if (arg <= 0 || arg > state->ivlen)
275 + arg = state->ivlen;
276 + memcpy(ptr, state->iv + state->ivlen - arg, arg);
284 * libcrypto EVP stuff - this is how we get wired to EVP so the engine
285 * gets called when libcrypto requests a cipher NID.
286 @@ -947,6 +1162,22 @@ const EVP_CIPHER cryptodev_aes_256_cbc_hmac_sha1 = {
290 +const EVP_CIPHER cryptodev_aes_128_gcm = {
293 + EVP_CIPH_GCM_MODE | EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1
294 + | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER
295 + | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT,
296 + cryptodev_init_gcm_key,
297 + cryptodev_gcm_cipher,
299 + sizeof(struct dev_crypto_state),
300 + EVP_CIPHER_set_asn1_iv,
301 + EVP_CIPHER_get_asn1_iv,
302 + cryptodev_gcm_ctrl,
306 # ifdef CRYPTO_AES_CTR
307 const EVP_CIPHER cryptodev_aes_ctr = {
309 @@ -1041,6 +1272,9 @@ cryptodev_engine_ciphers(ENGINE *e, const EVP_CIPHER **cipher,
310 case NID_aes_256_cbc_hmac_sha1:
311 *cipher = &cryptodev_aes_256_cbc_hmac_sha1;
313 + case NID_aes_128_gcm:
314 + *cipher = &cryptodev_aes_128_gcm;