]> code.ossystems Code Review - meta-freescale.git/blob
5a8c2d290a5d3e1f8db5b73f66f048b14d7ce08e
[meta-freescale.git] /
1 From 317e3d9870097e6b115dd8c9a13ccb5e5ca76f2e Mon Sep 17 00:00:00 2001
2 From: Cristian Stoica <cristian.stoica@freescale.com>
3 Date: Mon, 16 Jun 2014 14:06:21 +0300
4 Subject: [PATCH 14/48] cryptodev: add support for aes-gcm algorithm offloading
5
6 Signed-off-by: Cristian Stoica <cristian.stoica@freescale.com>
7 ---
8  apps/speed.c                  |   6 +-
9  crypto/engine/eng_cryptodev.c | 236 +++++++++++++++++++++++++++++++++++++++++-
10  2 files changed, 240 insertions(+), 2 deletions(-)
11
12 diff --git a/apps/speed.c b/apps/speed.c
13 index 95adcc1..e5e609b 100644
14 --- a/apps/speed.c
15 +++ b/apps/speed.c
16 @@ -226,7 +226,11 @@
17  # endif
18  
19  # undef BUFSIZE
20 -# define BUFSIZE ((long)1024*8+1)
21 +/* The buffer overhead allows GCM tag at the end of the encrypted data. This
22 +   avoids buffer overflows from cryptodev since Linux kernel GCM
23 +   implementation always adds the tag - unlike e_aes.c:aes_gcm_cipher()
24 +   which doesn't */
25 +# define BUFSIZE       ((long)1024*8 + EVP_GCM_TLS_TAG_LEN)
26  static volatile int run = 0;
27  
28  static int mr = 0;
29 diff --git a/crypto/engine/eng_cryptodev.c b/crypto/engine/eng_cryptodev.c
30 index 4929ae6..d2cdca0 100644
31 --- a/crypto/engine/eng_cryptodev.c
32 +++ b/crypto/engine/eng_cryptodev.c
33 @@ -2,6 +2,7 @@
34   * Copyright (c) 2002 Bob Beck <beck@openbsd.org>
35   * Copyright (c) 2002 Theo de Raadt
36   * Copyright (c) 2002 Markus Friedl
37 + * Copyright (c) 2013-2014 Freescale Semiconductor, Inc.
38   * All rights reserved.
39   *
40   * Redistribution and use in source and binary forms, with or without
41 @@ -77,8 +78,10 @@ struct dev_crypto_state {
42      struct session_op d_sess;
43      int d_fd;
44      unsigned char *aad;
45 -    unsigned int aad_len;
46 +    int aad_len;
47      unsigned int len;
48 +    unsigned char *iv;
49 +    int ivlen;
50  # ifdef USE_CRYPTODEV_DIGESTS
51      char dummy_mac_key[HASH_MAX_LEN];
52      unsigned char digest_res[HASH_MAX_LEN];
53 @@ -287,6 +290,9 @@ static struct {
54          CRYPTO_TLS10_AES_CBC_HMAC_SHA1, NID_aes_256_cbc_hmac_sha1, 16, 32, 20
55      },
56      {
57 +        CRYPTO_AES_GCM, NID_aes_128_gcm, 16, 16, 0
58 +    },
59 +    {
60          0, NID_undef, 0, 0, 0
61      },
62  };
63 @@ -325,6 +331,22 @@ static struct {
64  };
65  # endif
66  
67 +/* increment counter (64-bit int) by 1 */
68 +static void ctr64_inc(unsigned char *counter)
69 +{
70 +    int n = 8;
71 +    unsigned char c;
72 +
73 +    do {
74 +        --n;
75 +        c = counter[n];
76 +        ++c;
77 +        counter[n] = c;
78 +        if (c)
79 +            return;
80 +    } while (n);
81 +}
82 +
83  /*
84   * Return a fd if /dev/crypto seems usable, 0 otherwise.
85   */
86 @@ -807,6 +829,199 @@ static int cryptodev_cbc_hmac_sha1_ctrl(EVP_CIPHER_CTX *ctx, int type,
87      }
88  }
89  
90 +static int cryptodev_init_gcm_key(EVP_CIPHER_CTX *ctx,
91 +                                  const unsigned char *key,
92 +                                  const unsigned char *iv, int enc)
93 +{
94 +    struct dev_crypto_state *state = ctx->cipher_data;
95 +    struct session_op *sess = &state->d_sess;
96 +    int cipher = -1, i;
97 +    if (!iv && !key)
98 +        return 1;
99 +
100 +    if (iv)
101 +        memcpy(ctx->iv, iv, ctx->cipher->iv_len);
102 +
103 +    for (i = 0; ciphers[i].id; i++)
104 +        if (ctx->cipher->nid == ciphers[i].nid &&
105 +            ctx->cipher->iv_len <= ciphers[i].ivmax &&
106 +            ctx->key_len == ciphers[i].keylen) {
107 +            cipher = ciphers[i].id;
108 +            break;
109 +        }
110 +
111 +    if (!ciphers[i].id) {
112 +        state->d_fd = -1;
113 +        return 0;
114 +    }
115 +
116 +    memset(sess, 0, sizeof(struct session_op));
117 +
118 +    if ((state->d_fd = get_dev_crypto()) < 0)
119 +        return 0;
120 +
121 +    sess->key = (unsigned char *)key;
122 +    sess->keylen = ctx->key_len;
123 +    sess->cipher = cipher;
124 +
125 +    if (ioctl(state->d_fd, CIOCGSESSION, sess) == -1) {
126 +        put_dev_crypto(state->d_fd);
127 +        state->d_fd = -1;
128 +        return 0;
129 +    }
130 +    return 1;
131 +}
132 +
133 +static int cryptodev_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
134 +                                    const unsigned char *in, size_t len)
135 +{
136 +    struct crypt_auth_op cryp = { 0 };
137 +    struct dev_crypto_state *state = ctx->cipher_data;
138 +    struct session_op *sess = &state->d_sess;
139 +    int rv = len;
140 +
141 +    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
142 +                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
143 +                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
144 +        return 0;
145 +
146 +    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
147 +    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
148 +    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
149 +
150 +    if (ctx->encrypt) {
151 +        len -= EVP_GCM_TLS_TAG_LEN;
152 +    }
153 +    cryp.ses = sess->ses;
154 +    cryp.len = len;
155 +    cryp.src = (unsigned char *)in;
156 +    cryp.dst = out;
157 +    cryp.auth_src = state->aad;
158 +    cryp.auth_len = state->aad_len;
159 +    cryp.iv = ctx->iv;
160 +    cryp.op = ctx->encrypt ? COP_ENCRYPT : COP_DECRYPT;
161 +
162 +    if (ioctl(state->d_fd, CIOCAUTHCRYPT, &cryp) == -1) {
163 +        return 0;
164 +    }
165 +
166 +    if (ctx->encrypt)
167 +        ctr64_inc(state->iv + state->ivlen - 8);
168 +    else
169 +        rv = len - EVP_GCM_TLS_TAG_LEN;
170 +
171 +    return rv;
172 +}
173 +
174 +static int cryptodev_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
175 +                                const unsigned char *in, size_t len)
176 +{
177 +    struct crypt_auth_op cryp;
178 +    struct dev_crypto_state *state = ctx->cipher_data;
179 +    struct session_op *sess = &state->d_sess;
180 +
181 +    if (state->d_fd < 0)
182 +        return 0;
183 +
184 +    if ((len % ctx->cipher->block_size) != 0)
185 +        return 0;
186 +
187 +    if (state->aad_len >= 0)
188 +        return cryptodev_gcm_tls_cipher(ctx, out, in, len);
189 +
190 +    memset(&cryp, 0, sizeof(cryp));
191 +
192 +    cryp.ses = sess->ses;
193 +    cryp.len = len;
194 +    cryp.src = (unsigned char *)in;
195 +    cryp.dst = out;
196 +    cryp.auth_src = NULL;
197 +    cryp.auth_len = 0;
198 +    cryp.iv = ctx->iv;
199 +    cryp.op = ctx->encrypt ? COP_ENCRYPT : COP_DECRYPT;
200 +
201 +    if (ioctl(state->d_fd, CIOCAUTHCRYPT, &cryp) == -1) {
202 +        return 0;
203 +    }
204 +
205 +    return len;
206 +}
207 +
208 +static int cryptodev_gcm_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
209 +                              void *ptr)
210 +{
211 +    struct dev_crypto_state *state = ctx->cipher_data;
212 +    switch (type) {
213 +    case EVP_CTRL_INIT:
214 +        {
215 +            state->ivlen = ctx->cipher->iv_len;
216 +            state->iv = ctx->iv;
217 +            state->aad_len = -1;
218 +            return 1;
219 +        }
220 +    case EVP_CTRL_GCM_SET_IV_FIXED:
221 +        {
222 +            /* Special case: -1 length restores whole IV */
223 +            if (arg == -1) {
224 +                memcpy(state->iv, ptr, state->ivlen);
225 +                return 1;
226 +            }
227 +            /*
228 +             * Fixed field must be at least 4 bytes and invocation field at
229 +             * least 8.
230 +             */
231 +            if ((arg < 4) || (state->ivlen - arg) < 8)
232 +                return 0;
233 +            if (arg)
234 +                memcpy(state->iv, ptr, arg);
235 +            if (ctx->encrypt &&
236 +                RAND_bytes(state->iv + arg, state->ivlen - arg) <= 0)
237 +                return 0;
238 +            return 1;
239 +        }
240 +    case EVP_CTRL_AEAD_TLS1_AAD:
241 +        {
242 +            unsigned int len;
243 +            if (arg != 13)
244 +                return 0;
245 +
246 +            memcpy(ctx->buf, ptr, arg);
247 +            len = ctx->buf[arg - 2] << 8 | ctx->buf[arg - 1];
248 +
249 +            /* Correct length for explicit IV */
250 +            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
251 +
252 +            /* If decrypting correct for tag too */
253 +            if (!ctx->encrypt)
254 +                len -= EVP_GCM_TLS_TAG_LEN;
255 +
256 +            ctx->buf[arg - 2] = len >> 8;
257 +            ctx->buf[arg - 1] = len & 0xff;
258 +
259 +            state->aad = ctx->buf;
260 +            state->aad_len = arg;
261 +            state->len = len;
262 +
263 +            /* Extra padding: tag appended to record */
264 +            return EVP_GCM_TLS_TAG_LEN;
265 +        }
266 +    case EVP_CTRL_GCM_SET_IV_INV:
267 +        {
268 +            if (ctx->encrypt)
269 +                return 0;
270 +            memcpy(state->iv + state->ivlen - arg, ptr, arg);
271 +            return 1;
272 +        }
273 +    case EVP_CTRL_GCM_IV_GEN:
274 +        if (arg <= 0 || arg > state->ivlen)
275 +            arg = state->ivlen;
276 +        memcpy(ptr, state->iv + state->ivlen - arg, arg);
277 +        return 1;
278 +    default:
279 +        return -1;
280 +    }
281 +}
282 +
283  /*
284   * libcrypto EVP stuff - this is how we get wired to EVP so the engine
285   * gets called when libcrypto requests a cipher NID.
286 @@ -947,6 +1162,22 @@ const EVP_CIPHER cryptodev_aes_256_cbc_hmac_sha1 = {
287      NULL
288  };
289  
290 +const EVP_CIPHER cryptodev_aes_128_gcm = {
291 +    NID_aes_128_gcm,
292 +    1, 16, 12,
293 +    EVP_CIPH_GCM_MODE | EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1
294 +        | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER
295 +        | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT,
296 +    cryptodev_init_gcm_key,
297 +    cryptodev_gcm_cipher,
298 +    cryptodev_cleanup,
299 +    sizeof(struct dev_crypto_state),
300 +    EVP_CIPHER_set_asn1_iv,
301 +    EVP_CIPHER_get_asn1_iv,
302 +    cryptodev_gcm_ctrl,
303 +    NULL
304 +};
305 +
306  # ifdef CRYPTO_AES_CTR
307  const EVP_CIPHER cryptodev_aes_ctr = {
308      NID_aes_128_ctr,
309 @@ -1041,6 +1272,9 @@ cryptodev_engine_ciphers(ENGINE *e, const EVP_CIPHER **cipher,
310      case NID_aes_256_cbc_hmac_sha1:
311          *cipher = &cryptodev_aes_256_cbc_hmac_sha1;
312          break;
313 +    case NID_aes_128_gcm:
314 +        *cipher = &cryptodev_aes_128_gcm;
315 +        break;
316      default:
317          *cipher = NULL;
318          break;
319 -- 
320 2.7.0
321