// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2023 MediaTek Inc.
 *
 * Author: Chris.Chou <chris.chou@mediatek.com>
 *         Ren-Ting Wang <ren-ting.wang@mediatek.com>
 */

#include <linux/bitops.h>
#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/skcipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/hash.h>

#include "crypto-eip/crypto-eip.h"
#include "crypto-eip/ddk-wrapper.h"
#include "crypto-eip/lookaside.h"
#include "crypto-eip/internal.h"

static inline u64 mtk_crypto_queued_len(struct mtk_crypto_ahash_req *req)
{
        return req->len - req->processed;
}

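/*
 * Put the request on the driver's crypto queue and kick the EIP workqueue;
 * the worker later dequeues it and invokes the ->send handler below.
 */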
static int mtk_crypto_ahash_enqueue(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_priv *priv = ctx->priv;
        int ret;

        spin_lock_bh(&priv->mtk_eip_queue.queue_lock);
        ret = crypto_enqueue_request(&priv->mtk_eip_queue.queue, &areq->base);
        spin_unlock_bh(&priv->mtk_eip_queue.queue_lock);

        queue_work(priv->mtk_eip_queue.workqueue,
                   &priv->mtk_eip_queue.work_data.work);

        return ret;
}

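/*
 * Push the queued hash data to the EIP197 engine. Data that does not fill
 * whole cache blocks is stashed in cache_next for a later call unless this
 * is a finishing/last request. For XCBC/CMAC the final partial block is
 * 10*-padded to a full AES block and XORed with a precomputed subkey from
 * ctx->ipad before going through the AES-CBC path.
 */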
static int mtk_crypto_ahash_send(struct crypto_async_request *async)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        int cache_len;
        int extra = 0;
        u64 queued;
        u64 len;
        int ret;
        uint8_t *cur_req;
        int i;

        if (req->hmac_zlen)
                goto zero_length_hmac;

        queued = mtk_crypto_queued_len(req);
        if (queued <= HASH_CACHE_SIZE)
                cache_len = queued;
        else
                cache_len = queued - areq->nbytes;

        if (!req->finish && !req->last_req) {
                /* If this is not the last request and the queued data does not
                 * fit into full cache blocks, cache it for the next send call.
                 */
                extra = queued & (HASH_CACHE_SIZE - 1);

                if (!extra)
                        extra = HASH_CACHE_SIZE;

                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache_next, extra, areq->nbytes - extra);

                queued -= extra;

                if (!queued)
                        return 0;

                extra = 0;
        }

        len = queued;
        cur_req = kmalloc(len + AES_BLOCK_SIZE, GFP_KERNEL);
        if (!cur_req) {
                CRYPTO_ERR("alloc buffer for ahash request failed\n");
                ret = -ENOMEM;
                goto exit;
        }
        /* Send request to EIP197 */
        if (cache_len) {
                memcpy(cur_req, req->cache, cache_len);
                queued -= cache_len;
        }
        if (queued)
                sg_copy_to_buffer(areq->src, sg_nents(areq->src), cur_req + cache_len, queued);

        if (unlikely(req->xcbcmac)) {
                int pad_size = 0;
                int offset;
                int new;

                if (req->finish) {
                        new = len % AES_BLOCK_SIZE;
                        pad_size = AES_BLOCK_SIZE - new;
                        offset = (len - new) / sizeof(u32);

                        if (pad_size != AES_BLOCK_SIZE) {
                                memset(cur_req + len, 0, pad_size);
                                cur_req[len] = 0x80;
                                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
                                        ((__be32 *) cur_req)[offset + i] ^=
                                                cpu_to_be32(le32_to_cpu(
                                                        ctx->ipad[i + 4]));
                                }
                        } else {
                                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
                                        ((__be32 *) cur_req)[offset - 4 + i] ^=
                                                cpu_to_be32(le32_to_cpu(
                                                        ctx->ipad[i]));
                                pad_size = 0;
                        }
                }

                ret = crypto_ahash_aes_cbc(async, req, cur_req, len + pad_size);
                kfree(cur_req);
                if (ret) {
                        if (req->sa_pointer)
                                crypto_free_sa(req->sa_pointer);
                        kfree(req->token_context);
                        CRYPTO_ERR("Fail on ahash_aes_cbc process\n");
                        goto exit;
                }
                req->not_first = true;
                req->processed += len - extra;

                return 0;
        }

        if (req->not_first)
                ret = crypto_ahash_token_req(async, req, cur_req, len, req->finish);
        else
                ret = crypto_first_ahash_req(async, req, cur_req, len, req->finish);

        kfree(cur_req);

        if (ret) {
                if (req->sa_pointer)
                        crypto_free_sa(req->sa_pointer);
                kfree(req->token_context);
                CRYPTO_ERR("Fail on ahash_req process\n");
                goto exit;
        }
        req->not_first = true;
        req->processed += len - extra;

        return 0;

zero_length_hmac:
        if (req->sa_pointer)
                crypto_free_sa(req->sa_pointer);
        kfree(req->token_context);

        /* Complete the final hash with opad for HMAC */
        if (req->hmac) {
                req->sa_pointer = ctx->opad_sa;
                req->token_context = ctx->opad_token;
                ret = crypto_ahash_token_req(async, req, (uint8_t *) req->state,
                                             req->digest_sz, true);
                if (ret)
                        goto exit;
        }

        return 0;
exit:
        async->complete(async, ret);

        return 0;
}

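/*
 * Completion callback: copy the digest (or the last AES block for
 * XCBC/CMAC) back into the request state and release engine resources.
 * A finished HMAC is re-enqueued once with hmac_zlen set so the outer
 * (opad) hash runs before the final digest is copied to areq->result.
 */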
static int mtk_crypto_ahash_handle_result(struct mtk_crypto_result *res, int err)
{
        struct crypto_async_request *async = res->async;
        struct ahash_request *areq = ahash_request_cast(async);
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        int cache_len;

        if (req->xcbcmac) {
                memcpy(req->state, res->dst + res->size - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
                crypto_free_sa(res->eip.sa);
                kfree(res->eip.token_context);
        } else {
                memcpy(req->state, res->dst, req->digest_sz);
        }

        crypto_free_token(res->eip.token);
        crypto_free_pkt(res->eip.pkt_handle);

        if (req->finish) {
                if (req->hmac && !req->hmac_zlen) {
                        req->hmac_zlen = true;
                        mtk_crypto_ahash_enqueue(areq);
                        return 0;
                }
                if (req->sa_pointer)
                        crypto_free_sa(req->sa_pointer);

                kfree(req->token_context);

                memcpy(areq->result, req->state, crypto_ahash_digestsize(ahash));
        }

        cache_len = mtk_crypto_queued_len(req);
        if (cache_len)
                memcpy(req->cache, req->cache_next, cache_len);
        async->complete(async, 0);

        return 0;
}

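/*
 * Buffer the new data in req->cache if it still fits together with any
 * unprocessed bytes; return -E2BIG when a flush to the engine is needed.
 */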
static int mtk_crypto_ahash_cache(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
        u64 cache_len;

        cache_len = mtk_crypto_queued_len(req);

        if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache + cache_len,
                                   areq->nbytes, 0);
                return 0;
        }

        return -E2BIG;
}

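/*
 * Accumulate update data; the request is only pushed to the engine when
 * the software cache overflows or this request was marked as the last one.
 */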
static int mtk_crypto_ahash_update(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        if (!areq->nbytes)
                return 0;

        ret = mtk_crypto_ahash_cache(areq);

        req->len += areq->nbytes;

        if ((ret && !req->finish) || req->last_req)
                return mtk_crypto_ahash_enqueue(areq);

        return 0;
}

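/*
 * Finalize the hash. Zero-message and other degenerate cases are answered
 * directly with precomputed digests; everything else is queued so the
 * engine produces the final digest.
 */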
static int mtk_crypto_ahash_final(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

        req->finish = true;

        if (unlikely(!req->len && !areq->nbytes && !req->hmac)) {
                if (ctx->alg == MTK_CRYPTO_ALG_SHA1)
                        memcpy(areq->result, sha1_zero_message_hash,
                               SHA1_DIGEST_SIZE);
                else if (ctx->alg == MTK_CRYPTO_ALG_SHA224)
                        memcpy(areq->result, sha224_zero_message_hash,
                               SHA224_DIGEST_SIZE);
                else if (ctx->alg == MTK_CRYPTO_ALG_SHA256)
                        memcpy(areq->result, sha256_zero_message_hash,
                               SHA256_DIGEST_SIZE);
                else if (ctx->alg == MTK_CRYPTO_ALG_SHA384)
                        memcpy(areq->result, sha384_zero_message_hash,
                               SHA384_DIGEST_SIZE);
                else if (ctx->alg == MTK_CRYPTO_ALG_SHA512)
                        memcpy(areq->result, sha512_zero_message_hash,
                               SHA512_DIGEST_SIZE);
                else if (ctx->alg == MTK_CRYPTO_ALG_MD5)
                        memcpy(areq->result, md5_zero_message_hash,
                               MD5_DIGEST_SIZE);

                return 0;
        } else if (unlikely(req->digest == MTK_CRYPTO_DIGEST_XCM &&
                            ctx->alg == MTK_CRYPTO_ALG_MD5 && req->len == sizeof(u32) &&
                            !areq->nbytes)) {
                memcpy(areq->result, ctx->ipad, sizeof(u32));
                return 0;
        } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                memset(areq->result, 0, AES_BLOCK_SIZE);
                return 0;
        } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                int i;

                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
                        ((__be32 *) areq->result)[i] =
                                cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4]));
                areq->result[0] ^= 0x80;
                crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
                return 0;
        } else if (unlikely(req->hmac && (req->len == req->block_sz) &&
                            !areq->nbytes)) {
                memcpy(req->state, ctx->zero_hmac, req->state_sz);
                req->hmac_zlen = true;
        } else if (req->hmac) {
                req->digest = MTK_CRYPTO_DIGEST_HMAC;
        }

        return mtk_crypto_ahash_enqueue(areq);
}

static int mtk_crypto_ahash_finup(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        req->finish = true;

        mtk_crypto_ahash_update(areq);
        return mtk_crypto_ahash_final(areq);
}

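/*
 * Export/import the software request state (lengths, SA/token pointers and
 * the data cache) so a transform can be suspended and resumed.
 */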
static int mtk_crypto_ahash_export(struct ahash_request *areq, void *out)
{
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
        struct mtk_crypto_ahash_export_state *export = out;

        export->len = req->len;
        export->processed = req->processed;

        export->digest = req->digest;
        export->sa_pointer = req->sa_pointer;
        export->token_context = req->token_context;

        memcpy(export->state, req->state, req->state_sz);
        memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

        return 0;
}

static int mtk_crypto_ahash_import(struct ahash_request *areq, const void *in)
{
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
        const struct mtk_crypto_ahash_export_state *export = in;
        int ret;

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req->len = export->len;
        req->processed = export->processed;

        req->digest = export->digest;
        req->sa_pointer = export->sa_pointer;
        req->token_context = export->token_context;
        if (req->sa_pointer)
                req->not_first = true;

        memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
        memcpy(req->state, export->state, req->state_sz);

        return 0;
}

static int mtk_crypto_ahash_cra_init(struct crypto_tfm *tfm)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct mtk_crypto_alg_template *tmpl =
                container_of(__crypto_ahash_alg(tfm->__crt_alg),
                             struct mtk_crypto_alg_template, alg.ahash);

        ctx->priv = tmpl->priv;
        ctx->base.send = mtk_crypto_ahash_send;
        ctx->base.handle_result = mtk_crypto_ahash_handle_result;
        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct mtk_crypto_ahash_req));

        return 0;
}

static int mtk_crypto_sha1_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = MTK_CRYPTO_ALG_SHA1;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;

        return 0;
}

static int mtk_crypto_sha1_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_sha1_init(areq);

        if (ret)
                return ret;

        return mtk_crypto_ahash_finup(areq);
}

static void mtk_crypto_ahash_cra_exit(struct crypto_tfm *tfm)
{
}

struct mtk_crypto_alg_template mtk_crypto_sha1 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_sha1_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_sha1_digest,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "sha1",
                                .cra_driver_name = "crypto-eip-sha1",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

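/*
 * HMAC inits start from the precomputed inner-pad (ipad) state produced at
 * setkey time, so one block is already accounted for in len/processed.
 */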
static int mtk_crypto_hmac_sha1_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
        req->sa_pointer = ctx->ipad_sa;
        req->token_context = ctx->ipad_token;
        req->not_first = true;
        req->len = SHA1_BLOCK_SIZE;
        req->processed = SHA1_BLOCK_SIZE;

        ctx->alg = MTK_CRYPTO_ALG_SHA1;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int mtk_crypto_hmac_sha1_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_hmac_sha1_init(areq);

        if (ret)
                return ret;

        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_ahash_result {
        struct completion completion;
        int error;
};

static void mtk_crypto_ahash_complete(struct crypto_async_request *req, int error)
{
        struct mtk_crypto_ahash_result *result = req->data;

        if (error == -EINPROGRESS)
                return;

        result->error = error;
        complete(&result->completion);
}

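/*
 * Derive the HMAC inner and outer pads from the key: keys longer than the
 * block size are first digested, then both pads are XORed with the usual
 * HMAC_IPAD_VALUE/HMAC_OPAD_VALUE constants.
 */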
static int mtk_crypto_hmac_init_pad(struct ahash_request *areq, unsigned int blocksize,
                                    const u8 *key, unsigned int keylen,
                                    u8 *ipad, u8 *opad)
{
        struct mtk_crypto_ahash_result result;
        struct scatterlist sg;
        int ret, i;
        u8 *keydup;

        if (keylen <= blocksize) {
                memcpy(ipad, key, keylen);
        } else {
                keydup = kmemdup(key, keylen, GFP_KERNEL);
                if (!keydup)
                        return -ENOMEM;

                ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           mtk_crypto_ahash_complete, &result);
                sg_init_one(&sg, keydup, keylen);
                ahash_request_set_crypt(areq, &sg, ipad, keylen);
                init_completion(&result.completion);

                ret = crypto_ahash_digest(areq);
                if (ret == -EINPROGRESS || ret == -EBUSY) {
                        wait_for_completion_interruptible(&result.completion);
                        ret = result.error;
                }

                memzero_explicit(keydup, keylen);
                kfree(keydup);

                if (ret)
                        return ret;

                keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
        }

        memset(ipad + keylen, 0, blocksize - keylen);
        memcpy(opad, ipad, blocksize);

        for (i = 0; i < blocksize; i++) {
                ipad[i] ^= HMAC_IPAD_VALUE;
                opad[i] ^= HMAC_OPAD_VALUE;
        }

        return 0;
}

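/*
 * Run one block of pad data through the driver's own ahash (synchronously,
 * via a completion) and export the resulting engine state for later use as
 * a precomputed IV.
 */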
static int mtk_crypto_hmac_init_iv(struct ahash_request *areq, unsigned int blocksize,
                                   u8 *pad, void *state, bool zero)
{
        struct mtk_crypto_ahash_result result;
        struct mtk_crypto_ahash_req *req;
        struct scatterlist sg;
        int ret;

        ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   mtk_crypto_ahash_complete, &result);
        sg_init_one(&sg, pad, blocksize);
        ahash_request_set_crypt(areq, &sg, pad, blocksize);
        init_completion(&result.completion);

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req = ahash_request_ctx(areq);
        req->last_req = !zero;

        if (zero)
                ret = crypto_ahash_finup(areq);
        else
                ret = crypto_ahash_update(areq);
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                return ret;

        wait_for_completion_interruptible(&result.completion);
        if (result.error)
                return result.error;

        return crypto_ahash_export(areq, state);
}

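/*
 * Precompute the state used for zero-length HMAC messages: hash exactly
 * the ipad block and export the finished state.
 */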
int mtk_crypto_zero_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
                                void *istate)
{
        struct ahash_request *areq;
        struct crypto_ahash *tfm;
        unsigned int blocksize;
        u8 *ipad, *opad;
        int ret;

        tfm = crypto_alloc_ahash(alg, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        areq = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!areq) {
                ret = -ENOMEM;
                goto free_ahash;
        }

        crypto_ahash_clear_flags(tfm, ~0);
        blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

        ipad = kcalloc(2, blocksize, GFP_KERNEL);
        if (!ipad) {
                ret = -ENOMEM;
                goto free_request;
        }

        opad = ipad + blocksize;

        ret = mtk_crypto_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
        if (ret)
                goto free_ipad;

        ret = mtk_crypto_hmac_init_iv(areq, blocksize, ipad, istate, true);

free_ipad:
        kfree(ipad);
free_request:
        ahash_request_free(areq);
free_ahash:
        crypto_free_ahash(tfm);

        return ret;
}

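/*
 * Precompute the partial inner (ipad) and outer (opad) hash states used
 * for regular HMAC processing.
 */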
int mtk_crypto_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
                           void *istate, void *ostate)
{
        struct ahash_request *areq;
        struct crypto_ahash *tfm;
        unsigned int blocksize;
        u8 *ipad, *opad;
        int ret;

        tfm = crypto_alloc_ahash(alg, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        areq = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!areq) {
                ret = -ENOMEM;
                goto free_ahash;
        }

        crypto_ahash_clear_flags(tfm, ~0);
        blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

        ipad = kcalloc(2, blocksize, GFP_KERNEL);
        if (!ipad) {
                ret = -ENOMEM;
                goto free_request;
        }

        opad = ipad + blocksize;

        ret = mtk_crypto_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
        if (ret)
                goto free_ipad;

        ret = mtk_crypto_hmac_init_iv(areq, blocksize, ipad, istate, false);
        if (ret)
                goto free_ipad;

        ret = mtk_crypto_hmac_init_iv(areq, blocksize, opad, ostate, false);

free_ipad:
        kfree(ipad);
free_request:
        ahash_request_free(areq);
free_ahash:
        crypto_free_ahash(tfm);

        return ret;
}

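/*
 * Common HMAC setkey: compute the ipad/opad and zero-length states with
 * the driver's own base hash, then cache the exported states, SAs and
 * token contexts in the tfm context for later inits.
 */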
static int mtk_crypto_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
                                      unsigned int keylen, const char *alg,
                                      unsigned int state_sz)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct mtk_crypto_ahash_export_state istate, ostate, zeroi;
        int ret;

        ret = mtk_crypto_hmac_setkey(alg, key, keylen, &istate, &ostate);
        if (ret)
                return ret;

        ret = mtk_crypto_zero_hmac_setkey(alg, key, keylen, &zeroi);
        if (ret)
                return ret;

        memcpy(ctx->zero_hmac, &zeroi.state, state_sz);
        memcpy(ctx->ipad, &istate.state, state_sz);
        memcpy(ctx->opad, &ostate.state, state_sz);
        ctx->ipad_sa = istate.sa_pointer;
        ctx->ipad_token = istate.token_context;
        ctx->opad_sa = ostate.sa_pointer;
        ctx->opad_token = ostate.token_context;

        return 0;
}

static int mtk_crypto_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha1",
                                          SHA1_DIGEST_SIZE);
}

struct mtk_crypto_alg_template mtk_crypto_hmac_sha1 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_hmac_sha1_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_hmac_sha1_digest,
                .setkey = mtk_crypto_hmac_sha1_setkey,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha1)",
                                .cra_driver_name = "crypto-eip-hmac-sha1",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_sha256_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = MTK_CRYPTO_ALG_SHA256;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;
        req->digest_sz = SHA256_DIGEST_SIZE;
        req->block_sz = SHA256_BLOCK_SIZE;

        return 0;
}

static int mtk_crypto_sha256_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_sha256_init(areq);

        if (ret)
                return ret;

        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_sha256 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_sha256_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_sha256_digest,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA256_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "sha256",
                                .cra_driver_name = "crypto-eip-sha256",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_sha224_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = MTK_CRYPTO_ALG_SHA224;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;
        req->digest_sz = SHA256_DIGEST_SIZE;
        req->block_sz = SHA256_BLOCK_SIZE;

        return 0;
}

static int mtk_crypto_sha224_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_sha224_init(areq);

        if (ret)
                return ret;

        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_sha224 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_sha224_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_sha224_digest,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "sha224",
                                .cra_driver_name = "crypto-eip-sha224",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
                                         unsigned int keylen)
{
        return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha224",
                                          SHA256_DIGEST_SIZE);
}

static int mtk_crypto_hmac_sha224_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        memcpy(req->state, ctx->ipad, SHA224_DIGEST_SIZE);

        req->sa_pointer = ctx->ipad_sa;
        req->token_context = ctx->ipad_token;
        req->not_first = true;
        req->len = SHA224_BLOCK_SIZE;
        req->processed = SHA224_BLOCK_SIZE;

        ctx->alg = MTK_CRYPTO_ALG_SHA224;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA224_DIGEST_SIZE;
        req->digest_sz = SHA224_DIGEST_SIZE;
        req->block_sz = SHA224_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int mtk_crypto_hmac_sha224_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_hmac_sha224_init(areq);

        if (ret)
                return ret;
        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_hmac_sha224 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_hmac_sha224_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_hmac_sha224_digest,
                .setkey = mtk_crypto_hmac_sha224_setkey,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha224)",
                                .cra_driver_name = "crypto-eip-hmac-sha224",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
                                         unsigned int keylen)
{
        return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha256",
                                          SHA256_DIGEST_SIZE);
}

static int mtk_crypto_hmac_sha256_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
        req->sa_pointer = ctx->ipad_sa;
        req->token_context = ctx->ipad_token;
        req->not_first = true;
        req->len = SHA256_BLOCK_SIZE;
        req->processed = SHA256_BLOCK_SIZE;

        ctx->alg = MTK_CRYPTO_ALG_SHA256;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;
        req->digest_sz = SHA256_DIGEST_SIZE;
        req->block_sz = SHA256_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int mtk_crypto_hmac_sha256_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_hmac_sha256_init(areq);

        if (ret)
                return ret;
        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_hmac_sha256 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_hmac_sha256_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_hmac_sha256_digest,
                .setkey = mtk_crypto_hmac_sha256_setkey,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA256_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha256)",
                                .cra_driver_name = "crypto-eip-hmac-sha256",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_sha512_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = MTK_CRYPTO_ALG_SHA512;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA512_DIGEST_SIZE;
        req->digest_sz = SHA512_DIGEST_SIZE;
        req->block_sz = SHA512_BLOCK_SIZE;

        return 0;
}

static int mtk_crypto_sha512_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_sha512_init(areq);

        if (ret)
                return ret;
        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_sha512 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_sha512_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_sha512_digest,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA512_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "sha512",
                                .cra_driver_name = "crypto-eip-sha512",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_sha384_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = MTK_CRYPTO_ALG_SHA384;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA512_DIGEST_SIZE;
        req->digest_sz = SHA512_DIGEST_SIZE;
        req->block_sz = SHA384_BLOCK_SIZE;

        return 0;
}

static int mtk_crypto_sha384_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_sha384_init(areq);

        if (ret)
                return ret;

        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_sha384 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_sha384_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_sha384_digest,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA384_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "sha384",
                                .cra_driver_name = "crypto-eip-sha384",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
                                         unsigned int keylen)
{
        return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha512",
                                          SHA512_DIGEST_SIZE);
}

static int mtk_crypto_hmac_sha512_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
        req->sa_pointer = ctx->ipad_sa;
        req->token_context = ctx->ipad_token;
        req->not_first = true;
        req->len = SHA512_BLOCK_SIZE;
        req->processed = SHA512_BLOCK_SIZE;

        ctx->alg = MTK_CRYPTO_ALG_SHA512;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA512_DIGEST_SIZE;
        req->digest_sz = SHA512_DIGEST_SIZE;
        req->block_sz = SHA512_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int mtk_crypto_hmac_sha512_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_hmac_sha512_init(areq);

        if (ret)
                return ret;

        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_hmac_sha512 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_hmac_sha512_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_hmac_sha512_digest,
                .setkey = mtk_crypto_hmac_sha512_setkey,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA512_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha512)",
                                .cra_driver_name = "crypto-eip-hmac-sha512",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
                                         unsigned int keylen)
{
        return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha384",
                                          SHA512_DIGEST_SIZE);
}

static int mtk_crypto_hmac_sha384_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        memcpy(req->state, ctx->ipad, SHA384_DIGEST_SIZE);
        req->sa_pointer = ctx->ipad_sa;
        req->token_context = ctx->ipad_token;
        req->not_first = true;
        req->len = SHA384_BLOCK_SIZE;
        req->processed = SHA384_BLOCK_SIZE;

        ctx->alg = MTK_CRYPTO_ALG_SHA384;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = SHA384_DIGEST_SIZE;
        req->digest_sz = SHA384_DIGEST_SIZE;
        req->block_sz = SHA384_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int mtk_crypto_hmac_sha384_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_hmac_sha384_init(areq);

        if (ret)
                return ret;

        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_hmac_sha384 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_hmac_sha384_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_hmac_sha384_digest,
                .setkey = mtk_crypto_hmac_sha384_setkey,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = SHA384_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha384)",
                                .cra_driver_name = "crypto-eip-hmac-sha384",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_md5_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = MTK_CRYPTO_ALG_MD5;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = MD5_DIGEST_SIZE;
        req->digest_sz = MD5_DIGEST_SIZE;
        req->block_sz = MD5_HMAC_BLOCK_SIZE;

        return 0;
}

static int mtk_crypto_md5_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_md5_init(areq);

        if (ret)
                return ret;

        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_md5 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_md5_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_md5_digest,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = MD5_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "md5",
                                .cra_driver_name = "crypto-eip-md5",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int mtk_crypto_hmac_md5_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
        req->sa_pointer = ctx->ipad_sa;
        req->token_context = ctx->ipad_token;
        req->not_first = true;
        req->len = MD5_HMAC_BLOCK_SIZE;
        req->processed = MD5_HMAC_BLOCK_SIZE;

        ctx->alg = MTK_CRYPTO_ALG_MD5;
        req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
        req->state_sz = MD5_DIGEST_SIZE;
        req->digest_sz = MD5_DIGEST_SIZE;
        req->block_sz = MD5_HMAC_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int mtk_crypto_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
                                      unsigned int keylen)
{
        return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-md5",
                                          MD5_DIGEST_SIZE);
}

static int mtk_crypto_hmac_md5_digest(struct ahash_request *areq)
{
        int ret = mtk_crypto_hmac_md5_init(areq);

        if (ret)
                return ret;

        return mtk_crypto_ahash_finup(areq);
}

struct mtk_crypto_alg_template mtk_crypto_hmac_md5 = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_hmac_md5_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_hmac_md5_digest,
                .setkey = mtk_crypto_hmac_md5_setkey,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = MD5_DIGEST_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(md5)",
                                .cra_driver_name = "crypto-eip-hmac-md5",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_ahash_cra_init,
                                .cra_exit = mtk_crypto_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

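/*
 * Common init for the AES MAC modes (XCBC/CMAC): the key material derived
 * at setkey time lives in ctx->ipad (state_sz = ctx->key_sz), and
 * len/processed start at one AES block to account for it.
 */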
static int mtk_crypto_cbcmac_init(struct ahash_request *areq)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));
        memset(req->state, 0, SHA512_DIGEST_SIZE);

        req->len = AES_BLOCK_SIZE;
        req->processed = AES_BLOCK_SIZE;

        req->digest = MTK_CRYPTO_DIGEST_XCM;
        req->state_sz = ctx->key_sz;
        req->digest_sz = AES_BLOCK_SIZE;
        req->block_sz = AES_BLOCK_SIZE;
        req->xcbcmac = true;

        return 0;
}

static int mtk_crypto_cbcmac_digest(struct ahash_request *areq)
{
        return mtk_crypto_cbcmac_init(areq) ?: mtk_crypto_ahash_finup(areq);
}

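/*
 * XCBC-MAC setkey: derive K1/K2/K3 by encrypting the constant blocks
 * 0x01..01, 0x02..02 and 0x03..03 under the user key (RFC 3566), store
 * them byte-swapped in ctx->ipad and re-key the fallback cipher with K1.
 */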
static int mtk_crypto_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
                                     unsigned int len)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct crypto_aes_ctx aes;
        u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
        int ret, i;

        ret = aes_expandkey(&aes, key, len);
        if (ret)
                return ret;

        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
                                           CRYPTO_TFM_REQ_MASK);

        ret = crypto_cipher_setkey(ctx->kaes, key, len);
        if (ret)
                return ret;

        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
                                  "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
                                  "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
                                  "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");

        for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
                ctx->ipad[i] = (__force u32)cpu_to_be32(key_tmp[i]);

        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
                                           CRYPTO_TFM_REQ_MASK);
        ret = crypto_cipher_setkey(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
                                   AES_MIN_KEY_SIZE);
        if (ret)
                return ret;

        ctx->alg = MTK_CRYPTO_ALG_XCBC;
        ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
        ctx->cbcmac = false;

        memzero_explicit(&aes, sizeof(aes));
        return 0;
}

static int mtk_crypto_xcbcmac_cra_init(struct crypto_tfm *tfm)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

        mtk_crypto_ahash_cra_init(tfm);
        ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
        return PTR_ERR_OR_ZERO(ctx->kaes);
}

static void mtk_crypto_xcbcmac_cra_exit(struct crypto_tfm *tfm)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(ctx->kaes);
        mtk_crypto_ahash_cra_exit(tfm);
}

struct mtk_crypto_alg_template mtk_crypto_xcbcmac = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_cbcmac_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_cbcmac_digest,
                .setkey = mtk_crypto_xcbcmac_setkey,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = AES_BLOCK_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "xcbc(aes)",
                                .cra_driver_name = "crypto-eip-xcbc-aes",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = AES_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_xcbcmac_cra_init,
                                .cra_exit = mtk_crypto_xcbcmac_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

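/*
 * CMAC setkey: compute the two subkeys by encrypting the zero block and
 * doubling it twice in GF(2^128) (logic borrowed from crypto/cmac.c), then
 * store the subkeys plus the raw AES key in ctx->ipad.
 */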
static int mtk_crypto_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
                                  unsigned int len)
{
        struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct crypto_aes_ctx aes;
        __be64 consts[4];
        u64 _const[2];
        u8 msb_mask, gfmask;
        int ret, i;

        ret = aes_expandkey(&aes, key, len);
        if (ret)
                return ret;

        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
                                           CRYPTO_TFM_REQ_MASK);
        ret = crypto_cipher_setkey(ctx->kaes, key, len);
        if (ret)
                return ret;

        /* code below borrowed from crypto/cmac.c */
        /* encrypt the zero block */
        memset(consts, 0, AES_BLOCK_SIZE);
        crypto_cipher_encrypt_one(ctx->kaes, (u8 *) consts, (u8 *) consts);

        gfmask = 0x87;
        _const[0] = be64_to_cpu(consts[1]);
        _const[1] = be64_to_cpu(consts[0]);

        /* gf(2^128) multiply zero-ciphertext with u and u^2 */
        for (i = 0; i < 4; i += 2) {
                msb_mask = ((s64)_const[1] >> 63) & gfmask;
                _const[1] = (_const[1] << 1) | (_const[0] >> 63);
                _const[0] = (_const[0] << 1) ^ msb_mask;

                consts[i + 0] = cpu_to_be64(_const[1]);
                consts[i + 1] = cpu_to_be64(_const[0]);
        }
        /* end of code borrowed from crypto/cmac.c */

        for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
                ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *) consts)[i]);
        memcpy((uint8_t *) ctx->ipad + 2 * AES_BLOCK_SIZE, key, len);

        if (len == AES_KEYSIZE_192) {
                ctx->alg = MTK_CRYPTO_ALG_CMAC_192;
                ctx->key_sz = AES_KEYSIZE_192 + 2 * AES_BLOCK_SIZE;
        } else if (len == AES_KEYSIZE_256) {
                ctx->alg = MTK_CRYPTO_ALG_CMAC_256;
                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
        } else {
                ctx->alg = MTK_CRYPTO_ALG_CMAC_128;
                ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
        }
        ctx->cbcmac = false;

        memzero_explicit(&aes, sizeof(aes));
        return 0;
}

struct mtk_crypto_alg_template mtk_crypto_cmac = {
        .type = MTK_CRYPTO_ALG_TYPE_AHASH,
        .alg.ahash = {
                .init = mtk_crypto_cbcmac_init,
                .update = mtk_crypto_ahash_update,
                .final = mtk_crypto_ahash_final,
                .finup = mtk_crypto_ahash_finup,
                .digest = mtk_crypto_cbcmac_digest,
                .setkey = mtk_crypto_cmac_setkey,
                .export = mtk_crypto_ahash_export,
                .import = mtk_crypto_ahash_import,
                .halg = {
                        .digestsize = AES_BLOCK_SIZE,
                        .statesize = sizeof(struct mtk_crypto_ahash_export_state),
                        .base = {
                                .cra_name = "cmac(aes)",
                                .cra_driver_name = "crypto-eip-cmac-aes",
                                .cra_priority = MTK_CRYPTO_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = AES_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
                                .cra_init = mtk_crypto_xcbcmac_cra_init,
                                .cra_exit = mtk_crypto_xcbcmac_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};