1 From 11b55103463bac614e00d74e9f196ec4ec6bade1 Mon Sep 17 00:00:00 2001
2 From: Cristian Stoica <cristian.stoica@freescale.com>
3 Date: Mon, 16 Jun 2014 14:06:21 +0300
4 Subject: [PATCH 17/26] cryptodev: add support for aes-gcm algorithm offloading
6 Change-Id: I3b77dc5ef8b8f707309549244a02852d95b36168
7 Signed-off-by: Cristian Stoica <cristian.stoica@freescale.com>
8 Reviewed-on: http://git.am.freescale.net:8181/17226
11 crypto/engine/eng_cryptodev.c | 229 +++++++++++++++++++++++++++++++++++++++++-
12 2 files changed, 233 insertions(+), 2 deletions(-)
14 diff --git a/apps/speed.c b/apps/speed.c
15 index 9886ca3..099dede 100644
22 -#define BUFSIZE ((long)1024*8+1)
23 +/* The buffer overhead allows GCM tag at the end of the encrypted data. This
24 + avoids buffer overflows from cryptodev since Linux kernel GCM
25 + implementation always adds the tag - unlike e_aes.c:aes_gcm_cipher()
27 +#define BUFSIZE ((long)1024*8 + EVP_GCM_TLS_TAG_LEN)
31 diff --git a/crypto/engine/eng_cryptodev.c b/crypto/engine/eng_cryptodev.c
32 index 13d924f..4493490 100644
33 --- a/crypto/engine/eng_cryptodev.c
34 +++ b/crypto/engine/eng_cryptodev.c
35 @@ -78,8 +78,10 @@ struct dev_crypto_state {
36 struct session_op d_sess;
39 - unsigned int aad_len;
45 #ifdef USE_CRYPTODEV_DIGESTS
46 char dummy_mac_key[HASH_MAX_LEN];
47 @@ -251,6 +253,7 @@ static struct {
48 { CRYPTO_SKIPJACK_CBC, NID_undef, 0, 0, 0},
49 { CRYPTO_TLS10_AES_CBC_HMAC_SHA1, NID_aes_128_cbc_hmac_sha1, 16, 16, 20},
50 { CRYPTO_TLS10_AES_CBC_HMAC_SHA1, NID_aes_256_cbc_hmac_sha1, 16, 32, 20},
51 + { CRYPTO_AES_GCM, NID_aes_128_gcm, 16, 16, 0},
52 { 0, NID_undef, 0, 0, 0},
55 @@ -271,6 +274,19 @@ static struct {
59 +/* increment counter (64-bit int) by 1 */
60 +static void ctr64_inc(unsigned char *counter) {
73 * Return a fd if /dev/crypto seems usable, 0 otherwise.
75 @@ -762,6 +778,197 @@ static int cryptodev_cbc_hmac_sha1_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
79 +static int cryptodev_init_gcm_key(EVP_CIPHER_CTX *ctx,
80 + const unsigned char *key, const unsigned char *iv, int enc)
82 + struct dev_crypto_state *state = ctx->cipher_data;
83 + struct session_op *sess = &state->d_sess;
89 + memcpy(ctx->iv, iv, ctx->cipher->iv_len);
91 + for (i = 0; ciphers[i].id; i++)
92 + if (ctx->cipher->nid == ciphers[i].nid &&
93 + ctx->cipher->iv_len <= ciphers[i].ivmax &&
94 + ctx->key_len == ciphers[i].keylen) {
95 + cipher = ciphers[i].id;
99 + if (!ciphers[i].id) {
104 + memset(sess, 0, sizeof(struct session_op));
106 + if ((state->d_fd = get_dev_crypto()) < 0)
109 + sess->key = (unsigned char *) key;
110 + sess->keylen = ctx->key_len;
111 + sess->cipher = cipher;
113 + if (ioctl(state->d_fd, CIOCGSESSION, sess) == -1) {
114 + put_dev_crypto(state->d_fd);
121 +static int cryptodev_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
122 + const unsigned char *in, size_t len)
124 + struct crypt_auth_op cryp = {0};
125 + struct dev_crypto_state *state = ctx->cipher_data;
126 + struct session_op *sess = &state->d_sess;
129 + if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
130 + EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
131 + EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
134 + in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
135 + out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
136 + len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
138 + if (ctx->encrypt) {
139 + len -= EVP_GCM_TLS_TAG_LEN;
141 + cryp.ses = sess->ses;
143 + cryp.src = (unsigned char*) in;
145 + cryp.auth_src = state->aad;
146 + cryp.auth_len = state->aad_len;
148 + cryp.op = ctx->encrypt ? COP_ENCRYPT : COP_DECRYPT;
150 + if (ioctl(state->d_fd, CIOCAUTHCRYPT, &cryp) == -1) {
155 + ctr64_inc(state->iv + state->ivlen - 8);
157 + rv = len - EVP_GCM_TLS_TAG_LEN;
162 +static int cryptodev_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
163 + const unsigned char *in, size_t len)
165 + struct crypt_auth_op cryp;
166 + struct dev_crypto_state *state = ctx->cipher_data;
167 + struct session_op *sess = &state->d_sess;
169 + if (state->d_fd < 0)
172 + if ((len % ctx->cipher->block_size) != 0)
175 + if (state->aad_len >= 0)
176 + return cryptodev_gcm_tls_cipher(ctx, out, in, len);
178 + memset(&cryp, 0, sizeof(cryp));
180 + cryp.ses = sess->ses;
182 + cryp.src = (unsigned char*) in;
184 + cryp.auth_src = NULL;
187 + cryp.op = ctx->encrypt ? COP_ENCRYPT : COP_DECRYPT;
189 + if (ioctl(state->d_fd, CIOCAUTHCRYPT, &cryp) == -1) {
196 +static int cryptodev_gcm_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
199 + struct dev_crypto_state *state = ctx->cipher_data;
201 + case EVP_CTRL_INIT:
203 + state->ivlen = ctx->cipher->iv_len;
204 + state->iv = ctx->iv;
205 + state->aad_len = -1;
208 + case EVP_CTRL_GCM_SET_IV_FIXED:
210 + /* Special case: -1 length restores whole IV */
213 + memcpy(state->iv, ptr, state->ivlen);
216 + /* Fixed field must be at least 4 bytes and invocation field
219 + if ((arg < 4) || (state->ivlen - arg) < 8)
222 + memcpy(state->iv, ptr, arg);
223 + if (ctx->encrypt &&
224 + RAND_bytes(state->iv + arg, state->ivlen - arg) <= 0)
228 + case EVP_CTRL_AEAD_TLS1_AAD:
234 + memcpy(ctx->buf, ptr, arg);
235 + len=ctx->buf[arg-2] << 8 | ctx->buf[arg-1];
237 + /* Correct length for explicit IV */
238 + len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
240 + /* If decrypting correct for tag too */
242 + len -= EVP_GCM_TLS_TAG_LEN;
244 + ctx->buf[arg-2] = len >> 8;
245 + ctx->buf[arg-1] = len & 0xff;
247 + state->aad = ctx->buf;
248 + state->aad_len = arg;
251 + /* Extra padding: tag appended to record */
252 + return EVP_GCM_TLS_TAG_LEN;
254 + case EVP_CTRL_GCM_SET_IV_INV:
258 + memcpy(state->iv + state->ivlen - arg, ptr, arg);
261 + case EVP_CTRL_GCM_IV_GEN:
262 + if (arg <= 0 || arg > state->ivlen)
263 + arg = state->ivlen;
264 + memcpy(ptr, state->iv + state->ivlen - arg, arg);
271 * libcrypto EVP stuff - this is how we get wired to EVP so the engine
272 * gets called when libcrypto requests a cipher NID.
273 @@ -901,6 +1108,23 @@ const EVP_CIPHER cryptodev_aes_256_cbc_hmac_sha1 = {
274 cryptodev_cbc_hmac_sha1_ctrl,
278 +const EVP_CIPHER cryptodev_aes_128_gcm = {
281 + EVP_CIPH_GCM_MODE | EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
282 + | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
283 + | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT,
284 + cryptodev_init_gcm_key,
285 + cryptodev_gcm_cipher,
287 + sizeof(struct dev_crypto_state),
288 + EVP_CIPHER_set_asn1_iv,
289 + EVP_CIPHER_get_asn1_iv,
290 + cryptodev_gcm_ctrl,
295 * Registered by the ENGINE when used to find out how to deal with
296 * a particular NID in the ENGINE. this says what we'll do at the
297 @@ -944,6 +1168,9 @@ cryptodev_engine_ciphers(ENGINE *e, const EVP_CIPHER **cipher,
298 case NID_aes_256_cbc_hmac_sha1:
299 *cipher = &cryptodev_aes_256_cbc_hmac_sha1;
301 + case NID_aes_128_gcm:
302 + *cipher = &cryptodev_aes_128_gcm;