]> code.ossystems Code Review - meta-freescale.git/blob
a71bb456019696a73b378b04a1be57fdd9e0189f
[meta-freescale.git] /
1 From 11b55103463bac614e00d74e9f196ec4ec6bade1 Mon Sep 17 00:00:00 2001
2 From: Cristian Stoica <cristian.stoica@freescale.com>
3 Date: Mon, 16 Jun 2014 14:06:21 +0300
4 Subject: [PATCH 17/17] cryptodev: add support for aes-gcm algorithm offloading
5
6 Change-Id: I3b77dc5ef8b8f707309549244a02852d95b36168
7 Signed-off-by: Cristian Stoica <cristian.stoica@freescale.com>
8 Reviewed-on: http://git.am.freescale.net:8181/17226
9 ---
10  apps/speed.c                  |   6 +-
11  crypto/engine/eng_cryptodev.c | 229 +++++++++++++++++++++++++++++++++++++++++-
12  2 files changed, 233 insertions(+), 2 deletions(-)
13
14 diff --git a/apps/speed.c b/apps/speed.c
15 index 9886ca3..099dede 100644
16 --- a/apps/speed.c
17 +++ b/apps/speed.c
18 @@ -224,7 +224,11 @@
19  #endif
20  
21  #undef BUFSIZE
22 -#define BUFSIZE        ((long)1024*8+1)
23 +/* The buffer overhead allows GCM tag at the end of the encrypted data. This
24 +   avoids buffer overflows from cryptodev since Linux kernel GCM
25 +   implementation always adds the tag - unlike e_aes.c:aes_gcm_cipher()
26 +   which doesn't */
27 +#define BUFSIZE        ((long)1024*8 + EVP_GCM_TLS_TAG_LEN)
28  int run=0;
29  
30  static int mr=0;
31 diff --git a/crypto/engine/eng_cryptodev.c b/crypto/engine/eng_cryptodev.c
32 index 13d924f..4493490 100644
33 --- a/crypto/engine/eng_cryptodev.c
34 +++ b/crypto/engine/eng_cryptodev.c
35 @@ -78,8 +78,10 @@ struct dev_crypto_state {
36         struct session_op d_sess;
37         int d_fd;
38         unsigned char *aad;
39 -       unsigned int aad_len;
40 +       int aad_len;
41         unsigned int len;
42 +       unsigned char *iv;
43 +       int ivlen;
44  
45  #ifdef USE_CRYPTODEV_DIGESTS
46         char dummy_mac_key[HASH_MAX_LEN];
47 @@ -251,6 +253,7 @@ static struct {
48         { CRYPTO_SKIPJACK_CBC,  NID_undef,        0,  0,  0},
49         { CRYPTO_TLS10_AES_CBC_HMAC_SHA1, NID_aes_128_cbc_hmac_sha1, 16, 16, 20},
50         { CRYPTO_TLS10_AES_CBC_HMAC_SHA1, NID_aes_256_cbc_hmac_sha1, 16, 32, 20},
51 +       { CRYPTO_AES_GCM,       NID_aes_128_gcm,  16, 16, 0},
52         { 0, NID_undef, 0, 0, 0},
53  };
54  
55 @@ -271,6 +274,19 @@ static struct {
56  };
57  #endif
58  
59 +/* increment counter (64-bit int) by 1 */
60 +static void ctr64_inc(unsigned char *counter) {
61 +       int n=8;
62 +       unsigned char  c;
63 +
64 +       do {
65 +               --n;
66 +               c = counter[n];
67 +               ++c;
68 +               counter[n] = c;
69 +               if (c) return;
70 +       } while (n);
71 +}
72  /*
73   * Return a fd if /dev/crypto seems usable, 0 otherwise.
74   */
75 @@ -762,6 +778,197 @@ static int cryptodev_cbc_hmac_sha1_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
76         }
77  }
78  
79 +static int cryptodev_init_gcm_key(EVP_CIPHER_CTX *ctx,
80 +       const unsigned char *key, const unsigned char *iv, int enc)
81 +{
82 +       struct dev_crypto_state *state = ctx->cipher_data;
83 +       struct session_op *sess = &state->d_sess;
84 +       int cipher = -1, i;
85 +       if (!iv && !key)
86 +               return 1;
87 +
88 +       if (iv)
89 +               memcpy(ctx->iv, iv, ctx->cipher->iv_len);
90 +
91 +       for (i = 0; ciphers[i].id; i++)
92 +               if (ctx->cipher->nid == ciphers[i].nid &&
93 +                   ctx->cipher->iv_len <= ciphers[i].ivmax &&
94 +                   ctx->key_len == ciphers[i].keylen) {
95 +                       cipher = ciphers[i].id;
96 +                       break;
97 +               }
98 +
99 +       if (!ciphers[i].id) {
100 +               state->d_fd = -1;
101 +               return 0;
102 +       }
103 +
104 +       memset(sess, 0, sizeof(struct session_op));
105 +
106 +       if ((state->d_fd = get_dev_crypto()) < 0)
107 +               return 0;
108 +
109 +       sess->key = (unsigned char *) key;
110 +       sess->keylen = ctx->key_len;
111 +       sess->cipher = cipher;
112 +
113 +       if (ioctl(state->d_fd, CIOCGSESSION, sess) == -1) {
114 +               put_dev_crypto(state->d_fd);
115 +               state->d_fd = -1;
116 +               return 0;
117 +       }
118 +       return 1;
119 +}
120 +
121 +static int cryptodev_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
122 +               const unsigned char *in, size_t len)
123 +{
124 +       struct crypt_auth_op cryp = {0};
125 +       struct dev_crypto_state *state = ctx->cipher_data;
126 +       struct session_op *sess = &state->d_sess;
127 +       int rv = len;
128 +
129 +       if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
130 +                       EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
131 +                       EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
132 +               return 0;
133 +
134 +       in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
135 +       out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
136 +       len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
137 +
138 +       if (ctx->encrypt) {
139 +               len -= EVP_GCM_TLS_TAG_LEN;
140 +       }
141 +       cryp.ses = sess->ses;
142 +       cryp.len = len;
143 +       cryp.src = (unsigned char*) in;
144 +       cryp.dst = out;
145 +       cryp.auth_src = state->aad;
146 +       cryp.auth_len = state->aad_len;
147 +       cryp.iv = ctx->iv;
148 +       cryp.op = ctx->encrypt ? COP_ENCRYPT : COP_DECRYPT;
149 +
150 +       if (ioctl(state->d_fd, CIOCAUTHCRYPT, &cryp) == -1) {
151 +               return 0;
152 +       }
153 +
154 +       if (ctx->encrypt)
155 +               ctr64_inc(state->iv + state->ivlen - 8);
156 +       else
157 +               rv = len - EVP_GCM_TLS_TAG_LEN;
158 +
159 +       return rv;
160 +}
161 +
162 +static int cryptodev_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
163 +               const unsigned char *in, size_t len)
164 +{
165 +       struct crypt_auth_op cryp;
166 +       struct dev_crypto_state *state = ctx->cipher_data;
167 +       struct session_op *sess = &state->d_sess;
168 +
169 +       if (state->d_fd < 0)
170 +               return 0;
171 +
172 +       if ((len % ctx->cipher->block_size) != 0)
173 +               return 0;
174 +
175 +       if (state->aad_len >= 0)
176 +               return cryptodev_gcm_tls_cipher(ctx, out, in, len);
177 +
178 +       memset(&cryp, 0, sizeof(cryp));
179 +
180 +       cryp.ses = sess->ses;
181 +       cryp.len = len;
182 +       cryp.src = (unsigned char*) in;
183 +       cryp.dst = out;
184 +       cryp.auth_src = NULL;
185 +       cryp.auth_len = 0;
186 +       cryp.iv = ctx->iv;
187 +       cryp.op = ctx->encrypt ? COP_ENCRYPT : COP_DECRYPT;
188 +
189 +       if (ioctl(state->d_fd, CIOCAUTHCRYPT, &cryp) == -1) {
190 +               return 0;
191 +       }
192 +
193 +       return len;
194 +}
195 +
196 +static int cryptodev_gcm_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg,
197 +               void *ptr)
198 +{
199 +       struct dev_crypto_state *state = ctx->cipher_data;
200 +       switch (type) {
201 +       case EVP_CTRL_INIT:
202 +       {
203 +               state->ivlen = ctx->cipher->iv_len;
204 +               state->iv = ctx->iv;
205 +               state->aad_len = -1;
206 +               return 1;
207 +       }
208 +       case EVP_CTRL_GCM_SET_IV_FIXED:
209 +       {
210 +               /* Special case: -1 length restores whole IV */
211 +               if (arg == -1)
212 +                       {
213 +                       memcpy(state->iv, ptr, state->ivlen);
214 +                       return 1;
215 +                       }
216 +               /* Fixed field must be at least 4 bytes and invocation field
217 +                * at least 8.
218 +                */
219 +               if ((arg < 4) || (state->ivlen - arg) < 8)
220 +                       return 0;
221 +               if (arg)
222 +                       memcpy(state->iv, ptr, arg);
223 +               if (ctx->encrypt &&
224 +                       RAND_bytes(state->iv + arg, state->ivlen - arg) <= 0)
225 +                       return 0;
226 +               return 1;
227 +       }
228 +       case EVP_CTRL_AEAD_TLS1_AAD:
229 +       {
230 +               unsigned int len;
231 +               if (arg != 13)
232 +                       return 0;
233 +
234 +               memcpy(ctx->buf, ptr, arg);
235 +               len=ctx->buf[arg-2] << 8 | ctx->buf[arg-1];
236 +
237 +               /* Correct length for explicit IV */
238 +               len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
239 +
240 +               /* If decrypting correct for tag too */
241 +               if (!ctx->encrypt)
242 +                       len -= EVP_GCM_TLS_TAG_LEN;
243 +
244 +               ctx->buf[arg-2] = len >> 8;
245 +               ctx->buf[arg-1] = len & 0xff;
246 +
247 +               state->aad = ctx->buf;
248 +               state->aad_len = arg;
249 +               state->len = len;
250 +
251 +               /* Extra padding: tag appended to record */
252 +               return EVP_GCM_TLS_TAG_LEN;
253 +       }
254 +       case EVP_CTRL_GCM_SET_IV_INV:
255 +       {
256 +               if (ctx->encrypt)
257 +                       return 0;
258 +               memcpy(state->iv + state->ivlen - arg, ptr, arg);
259 +               return 1;
260 +       }
261 +       case EVP_CTRL_GCM_IV_GEN:
262 +               if (arg <= 0 || arg > state->ivlen)
263 +                       arg = state->ivlen;
264 +               memcpy(ptr, state->iv + state->ivlen - arg, arg);
265 +               return 1;
266 +       default:
267 +               return -1;
268 +       }
269 +}
270  /*
271   * libcrypto EVP stuff - this is how we get wired to EVP so the engine
272   * gets called when libcrypto requests a cipher NID.
273 @@ -901,6 +1108,23 @@ const EVP_CIPHER cryptodev_aes_256_cbc_hmac_sha1 = {
274         cryptodev_cbc_hmac_sha1_ctrl,
275         NULL
276  };
277 +
278 +const EVP_CIPHER cryptodev_aes_128_gcm = {
279 +       NID_aes_128_gcm,
280 +       1, 16, 12,
281 +       EVP_CIPH_GCM_MODE | EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
282 +       | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
283 +       | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT,
284 +       cryptodev_init_gcm_key,
285 +       cryptodev_gcm_cipher,
286 +       cryptodev_cleanup,
287 +       sizeof(struct dev_crypto_state),
288 +       EVP_CIPHER_set_asn1_iv,
289 +       EVP_CIPHER_get_asn1_iv,
290 +       cryptodev_gcm_ctrl,
291 +       NULL
292 +};
293 +
294  /*
295   * Registered by the ENGINE when used to find out how to deal with
296   * a particular NID in the ENGINE. this says what we'll do at the
297 @@ -944,6 +1168,9 @@ cryptodev_engine_ciphers(ENGINE *e, const EVP_CIPHER **cipher,
298         case NID_aes_256_cbc_hmac_sha1:
299                 *cipher = &cryptodev_aes_256_cbc_hmac_sha1;
300                 break;
301 +       case NID_aes_128_gcm:
302 +               *cipher = &cryptodev_aes_128_gcm;
303 +               break;
304         default:
305                 *cipher = NULL;
306                 break;
307 -- 
308 1.8.3.1
309