[][openwrt][mt7988][crypto][Add look-aside mode]
[Description]
Add look-aside mode. Similar to the upstream driver, register the
algorithms with the Linux crypto subsystem to provide encryption and
decryption services to other kernel modules.
The construction of transform records, command descriptors,
tokens, and packets is implemented through the APIs provided by the DDK.
The full list of supported algorithms can be found in src/init.c.
However, IPsec throughput with AES-CBC-HMAC-SHA1 is only 300 Mbps,
while the upstream driver reaches 1000 Mbps, so performance still
needs to be improved.
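
For reference, each look-aside algorithm is exposed through the
standard Linux crypto API registration path. The sketch below is
illustrative only: the callback and template names are placeholders,
not the driver's actual symbols (the real templates live in
src/lookaside-cipher.c and src/lookaside-hash.c).

  static struct skcipher_alg mtk_cbc_aes_sketch = {
          .setkey          = mtk_skcipher_setkey,   /* placeholder */
          .encrypt         = mtk_skcipher_encrypt,  /* placeholder */
          .decrypt         = mtk_skcipher_decrypt,  /* placeholder */
          .min_keysize     = AES_MIN_KEY_SIZE,
          .max_keysize     = AES_MAX_KEY_SIZE,
          .ivsize          = AES_BLOCK_SIZE,
          .base = {
                  .cra_name        = "cbc(aes)",
                  .cra_driver_name = "cbc-aes-mtk-lookaside",
                  .cra_priority    = MTK_CRYPTO_PRIORITY,
                  .cra_flags       = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                  .cra_blocksize   = AES_BLOCK_SIZE,
                  .cra_ctxsize     = sizeof(struct mtk_crypto_cipher_ctx),
                  .cra_module      = THIS_MODULE,
          },
  };

  /* typically done once from the driver/module init path */
  err = crypto_register_skcipher(&mtk_cbc_aes_sketch);
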
[Release-log]
N/A
Change-Id: I15d2550cb3190c47a79da82c8654dea6647bd101
Reviewed-on: https://gerrit.mediatek.inc/c/openwrt/feeds/mtk_openwrt_feeds/+/8838386
diff --git a/feed/kernel/crypto-eip/src/Makefile b/feed/kernel/crypto-eip/src/Makefile
index 6beba79..d09ede3 100644
--- a/feed/kernel/crypto-eip/src/Makefile
+++ b/feed/kernel/crypto-eip/src/Makefile
@@ -11,6 +11,9 @@
crypto-eip-inline-y += init.o
crypto-eip-inline-y += ddk-wrapper.o
+crypto-eip-inline-y += lookaside.o
+crypto-eip-inline-y += lookaside-cipher.o
+crypto-eip-inline-y += lookaside-hash.o
crypto-eip-inline-$(CONFIG_CRYPTO_XFRM_OFFLOAD_MTK_PCE) += xfrm-offload.o
diff --git a/feed/kernel/crypto-eip/src/ddk-wrapper.c b/feed/kernel/crypto-eip/src/ddk-wrapper.c
index 1fb1e3d..c07b2de 100644
--- a/feed/kernel/crypto-eip/src/ddk-wrapper.c
+++ b/feed/kernel/crypto-eip/src/ddk-wrapper.c
@@ -20,7 +20,61 @@
#include "crypto-eip/crypto-eip.h"
#include "crypto-eip/ddk-wrapper.h"
#include "crypto-eip/internal.h"
-#include "crypto-eip/crypto-eip197-inline-ddk.h"
+
+LIST_HEAD(result_list);
+
+void crypto_free_sa(void *sa_pointer)
+{
+ DMABuf_Handle_t SAHandle = {0};
+
+ SAHandle.p = sa_pointer;
+ PEC_SA_UnRegister(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+ DMABuf_Release(SAHandle);
+}
+
+void crypto_free_token(void *token)
+{
+ DMABuf_Handle_t TokenHandle = {0};
+
+ TokenHandle.p = token;
+ DMABuf_Release(TokenHandle);
+}
+
+/* TODO: to be removed */
+void crypto_free_pkt(void *pkt)
+{
+ DMABuf_Handle_t PktHandle = {0};
+
+ PktHandle.p = pkt;
+ DMABuf_Release(PktHandle);
+}
+
+void crypto_free_sglist(void *sglist)
+{
+ PEC_Status_t res;
+ unsigned int count;
+ unsigned int size;
+ DMABuf_Handle_t SGListHandle = {0};
+ DMABuf_Handle_t ParticleHandle = {0};
+ int i;
+ uint8_t *Particle_p;
+
+ SGListHandle.p = sglist;
+ res = PEC_SGList_GetCapacity(SGListHandle, &count);
+ if (res != PEC_STATUS_OK)
+ return;
+ for (i = 0; i < count; i++) {
+ PEC_SGList_Read(SGListHandle,
+ i,
+ &ParticleHandle,
+ &size,
+ &Particle_p);
+ DMABuf_Particle_Release(ParticleHandle);
+ }
+
+ PEC_SGList_Destroy(SGListHandle);
+}
static bool crypto_iotoken_create(IOToken_Input_Dscr_t * const dscr_p,
void * const ext_p, u32 *data_p,
@@ -28,75 +82,1610 @@
{
int IOTokenRc;
+ dscr_p->InPacket_ByteCount = pec_cmd_dscr->SrcPkt_ByteCount;
+ dscr_p->Ext_p = ext_p;
+
+ IOTokenRc = IOToken_Create(dscr_p, data_p);
+ if (IOTokenRc < 0) {
+ CRYPTO_ERR("IOToken_Create error %d\n", IOTokenRc);
+ return false;
+ }
+
+ pec_cmd_dscr->InputToken_p = data_p;
+
+ return true;
+}
+
+unsigned int crypto_pe_busy_get_one(IOToken_Output_Dscr_t *const OutTokenDscr_p,
+ u32 *OutTokenData_p,
+ PEC_ResultDescriptor_t *RD_p)
+{
+ int LoopCounter = MTK_EIP197_INLINE_NOF_TRIES;
+ int IOToken_Rc;
+ PEC_Status_t pecres;
+
+ ZEROINIT(*OutTokenDscr_p);
+ ZEROINIT(*RD_p);
+
+ /* Link data structures */
+ RD_p->OutputToken_p = OutTokenData_p;
+
+ while (LoopCounter > 0) {
+ /* Try to get the processed packet from the driver */
+ unsigned int Counter = 0;
+
+ pecres = PEC_Packet_Get(PEC_INTERFACE_ID, RD_p, 1, &Counter);
+ if (pecres != PEC_STATUS_OK) {
+ /* IO error */
+ CRYPTO_ERR("PEC_Packet_Get error %d\n", pecres);
+ return 0;
+ }
+
+ if (Counter) {
+ IOToken_Rc = IOToken_Parse(OutTokenData_p, OutTokenDscr_p);
+ if (IOToken_Rc < 0) {
+ /* IO error */
+ CRYPTO_ERR("IOToken_Parse error %d\n", IOToken_Rc);
+ return 0;
+ }
+
+ if (OutTokenDscr_p->ErrorCode != 0) {
+ /* Packet process error */
+ CRYPTO_ERR("Result descriptor error 0x%x\n",
+ OutTokenDscr_p->ErrorCode);
+ return 0;
+ }
+
+ /* packet received */
+ return Counter;
+ }
+
+ /* Back off for MTK_EIP197_PKT_GET_TIMEOUT_MS before polling again */
+ udelay(MTK_EIP197_PKT_GET_TIMEOUT_MS);
+ LoopCounter--;
+ }
+
+ /* IO error (timeout, no result packet received) */
+ return 0;
+}
+
+unsigned int crypto_pe_get_one(IOToken_Output_Dscr_t *const OutTokenDscr_p,
+ u32 *OutTokenData_p,
+ PEC_ResultDescriptor_t *RD_p)
+{
+ int IOToken_Rc;
+ unsigned int Counter = 0;
+ PEC_Status_t pecres;
+
+ ZEROINIT(*OutTokenDscr_p);
+ ZEROINIT(*RD_p);
+
+ RD_p->OutputToken_p = OutTokenData_p;
+
+ /* Try to get the processed packet from the driver */
+ pecres = PEC_Packet_Get(PEC_INTERFACE_ID, RD_p, 1, &Counter);
+ if (pecres != PEC_STATUS_OK) {
+ /* IO error */
+ CRYPTO_ERR("PEC_Packet_Get error %d\n", pecres);
+ return 0;
+ }
+
+ if (Counter) {
+ IOToken_Rc = IOToken_Parse(OutTokenData_p, OutTokenDscr_p);
+ if (IOToken_Rc < 0) {
+ /* IO error */
+ CRYPTO_ERR("IOToken_Parse error %d\n", IOToken_Rc);
+ return 0;
+ }
+ if (OutTokenDscr_p->ErrorCode != 0) {
+ /* Packet process error */
+ CRYPTO_ERR("Result descriptor error 0x%x\n",
+ OutTokenDscr_p->ErrorCode);
+ return 0;
+ }
+ /* packet received */
+ return Counter;
+ }
+
+ /* No result packet available yet */
+ return 0;
+}
+
+SABuilder_Crypto_Mode_t lookaside_match_alg_mode(enum mtk_crypto_cipher_mode mode)
+{
+ switch (mode) {
+ case MTK_CRYPTO_MODE_CBC:
+ return SAB_CRYPTO_MODE_CBC;
+ case MTK_CRYPTO_MODE_ECB:
+ return SAB_CRYPTO_MODE_ECB;
+ case MTK_CRYPTO_MODE_OFB:
+ return SAB_CRYPTO_MODE_OFB;
+ case MTK_CRYPTO_MODE_CFB:
+ return SAB_CRYPTO_MODE_CFB;
+ case MTK_CRYPTO_MODE_CTR:
+ return SAB_CRYPTO_MODE_CTR;
+ case MTK_CRYPTO_MODE_GCM:
+ return SAB_CRYPTO_MODE_GCM;
+ case MTK_CRYPTO_MODE_GMAC:
+ return SAB_CRYPTO_MODE_GMAC;
+ case MTK_CRYPTO_MODE_CCM:
+ return SAB_CRYPTO_MODE_CCM;
+ default:
+ return SAB_CRYPTO_MODE_BASIC;
+ }
+}
+
+SABuilder_Crypto_t lookaside_match_alg_name(enum mtk_crypto_alg alg)
+{
+ switch (alg) {
+ case MTK_CRYPTO_AES:
+ return SAB_CRYPTO_AES;
+ case MTK_CRYPTO_DES:
+ return SAB_CRYPTO_DES;
+ case MTK_CRYPTO_3DES:
+ return SAB_CRYPTO_3DES;
+ default:
+ return SAB_CRYPTO_NULL;
+ }
+}
+
+SABuilder_Auth_t aead_hash_match(enum mtk_crypto_alg alg)
+{
+ switch (alg) {
+ case MTK_CRYPTO_ALG_SHA1:
+ return SAB_AUTH_HMAC_SHA1;
+ case MTK_CRYPTO_ALG_SHA224:
+ return SAB_AUTH_HMAC_SHA2_224;
+ case MTK_CRYPTO_ALG_SHA256:
+ return SAB_AUTH_HMAC_SHA2_256;
+ case MTK_CRYPTO_ALG_SHA384:
+ return SAB_AUTH_HMAC_SHA2_384;
+ case MTK_CRYPTO_ALG_SHA512:
+ return SAB_AUTH_HMAC_SHA2_512;
+ case MTK_CRYPTO_ALG_MD5:
+ return SAB_AUTH_HMAC_MD5;
+ case MTK_CRYPTO_ALG_GCM:
+ return SAB_AUTH_AES_GCM;
+ case MTK_CRYPTO_ALG_GMAC:
+ return SAB_AUTH_AES_GMAC;
+ case MTK_CRYPTO_ALG_CCM:
+ return SAB_AUTH_AES_CCM;
+ default:
+ return SAB_AUTH_NULL;
+ }
+}
+
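+/*
+ * Result-ring notification callback: drain pending entries on result_list.
+ * For each entry, fetch one processed packet from the PEC driver, map the
+ * output-token error code to a status and pass it to the owning context's
+ * handle_result(). If no packet is ready yet, re-arm the notification and
+ * return.
+ */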
+void mtk_crypto_interrupt_handler(void)
+{
+ struct mtk_crypto_result *rd;
+ struct mtk_crypto_context *ctx;
+ IOToken_Output_Dscr_t OutTokenDscr;
+ PEC_ResultDescriptor_t Res;
+ uint32_t OutputToken[IOTOKEN_OUT_WORD_COUNT];
+ int ret = 0;
+
+ while (true) {
+ if (list_empty(&result_list))
+ return;
+ rd = list_first_entry(&result_list, struct mtk_crypto_result, list);
+
+ if (crypto_pe_get_one(&OutTokenDscr, OutputToken, &Res) < 1) {
+ PEC_NotifyFunction_t CBFunc;
+
+ CBFunc = mtk_crypto_interrupt_handler;
+ if (OutTokenDscr.ErrorCode == 0) {
+ PEC_ResultNotify_Request(PEC_INTERFACE_ID, CBFunc, 1);
+ return;
+ } else if (OutTokenDscr.ErrorCode & BIT(9)) {
+ ret = -EBADMSG;
+ } else if (OutTokenDscr.ErrorCode == 0x4003) {
+ ret = 0;
+ } else
+ ret = 1;
+
+ CRYPTO_ERR("error from crypto_pe_get_one: %d\n", ret);
+ }
+
+ ctx = crypto_tfm_ctx(rd->async->tfm);
+ ret = ctx->handle_result(rd, ret);
+
+ spin_lock_bh(&add_lock);
+ list_del(&rd->list);
+ spin_unlock_bh(&add_lock);
+ kfree(rd);
+ }
+}
+
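+/*
+ * Submit one AEAD request to the EIP197 in look-aside mode: build the SA
+ * and token through the DDK SABuilder/TokenBuilder APIs, map the src/dst
+ * scatterlists into PEC scatter-gather lists and queue the packet with
+ * PEC_Packet_Put(). Completion is reported asynchronously through
+ * mtk_crypto_interrupt_handler().
+ */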
+int crypto_aead_cipher(struct crypto_async_request *async, struct mtk_crypto_cipher_req *mtk_req,
+ struct scatterlist *src, struct scatterlist *dst, unsigned int cryptlen,
+ unsigned int assoclen, unsigned int digestsize, u8 *iv, unsigned int ivsize)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(async->tfm);
+ struct mtk_crypto_result *result;
+ struct scatterlist *sg;
+ unsigned int totlen_src;
+ unsigned int totlen_dst;
+ unsigned int src_pkt = cryptlen + assoclen;
+ unsigned int pass_assoc = 0;
+ int pass_id;
+ int rc;
+ int i;
+ SABuilder_Params_t params;
+ SABuilder_Params_Basic_t ProtocolParams;
+ unsigned int SAWords = 0;
+
+ DMABuf_Status_t DMAStatus;
+ DMABuf_Properties_t DMAProperties = {0, 0, 0, 0};
+ DMABuf_HostAddress_t SAHostAddress;
+ DMABuf_HostAddress_t TokenHostAddress;
+ DMABuf_HostAddress_t PktHostAddress;
+
+ DMABuf_Handle_t SAHandle = {0};
+ DMABuf_Handle_t TokenHandle = {0};
+ DMABuf_Handle_t SrcSGListHandle = {0};
+ DMABuf_Handle_t DstSGListHandle = {0};
+
+ unsigned int TCRWords = 0;
+ void *TCRData = 0;
+ unsigned int TokenWords = 0;
+ unsigned int TokenHeaderWord;
+ unsigned int TokenMaxWords = 0;
+
+ TokenBuilder_Params_t TokenParams;
+ PEC_CommandDescriptor_t Cmd;
+ PEC_NotifyFunction_t CBFunc;
+ unsigned int count;
+
+ IOToken_Input_Dscr_t InTokenDscr;
+ IOToken_Output_Dscr_t OutTokenDscr;
+ uint32_t InputToken[IOTOKEN_IN_WORD_COUNT];
+ void *InTokenDscrExt_p = NULL;
+ uint8_t gcm_iv[16] = {0};
+ uint8_t *aad = NULL;
+
+#ifdef CRYPTO_IOTOKEN_EXT
+ IOToken_Input_Dscr_Ext_t InTokenDscrExt;
+
+ ZEROINIT(InTokenDscrExt);
+ InTokenDscrExt_p = &InTokenDscrExt;
+#endif
+ ZEROINIT(InTokenDscr);
+ ZEROINIT(OutTokenDscr);
+
+ /* Init SA */
+ if (mtk_req->direction == MTK_CRYPTO_ENCRYPT) {
+ totlen_src = cryptlen + assoclen;
+ totlen_dst = totlen_src + digestsize;
+ rc = SABuilder_Init_Basic(¶ms, &ProtocolParams, SAB_DIRECTION_OUTBOUND);
+ } else {
+ totlen_src = cryptlen + assoclen;
+ totlen_dst = totlen_src - digestsize;
+ rc = SABuilder_Init_Basic(¶ms, &ProtocolParams, SAB_DIRECTION_INBOUND);
+ }
+ if (rc) {
+ CRYPTO_ERR("SABuilder_Init_Basic failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ /* Build SA */
+ params.CryptoAlgo = lookaside_match_alg_name(ctx->alg);
+ params.CryptoMode = lookaside_match_alg_mode(ctx->mode);
+ params.KeyByteCount = ctx->key_len;
+ params.Key_p = (uint8_t *) ctx->key;
+ if (params.CryptoMode == SAB_CRYPTO_MODE_GCM && ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
+ params.Nonce_p = (uint8_t *) &ctx->nonce;
+ params.IVSrc = SAB_IV_SRC_TOKEN;
+ params.flags |= SAB_FLAG_COPY_IV;
+ memcpy(gcm_iv, &ctx->nonce, 4);
+ memcpy(gcm_iv + 4, iv, ivsize);
+ gcm_iv[15] = 1;
+ } else if (params.CryptoMode == SAB_CRYPTO_MODE_GMAC) {
+ params.Nonce_p = (uint8_t *) &ctx->nonce;
+ params.IVSrc = SAB_IV_SRC_TOKEN;
+ memcpy(gcm_iv, &ctx->nonce, 4);
+ memcpy(gcm_iv + 4, iv, ivsize);
+ gcm_iv[15] = 1;
+ } else if (params.CryptoMode == SAB_CRYPTO_MODE_GCM) {
+ params.IVSrc = SAB_IV_SRC_TOKEN;
+ memcpy(gcm_iv, iv, ivsize);
+ gcm_iv[15] = 1;
+ } else if (params.CryptoMode == SAB_CRYPTO_MODE_CCM) {
+ params.IVSrc = SAB_IV_SRC_SA;
+ params.Nonce_p = (uint8_t *) &ctx->nonce + 1;
+ params.IV_p = iv;
+ } else {
+ params.IVSrc = SAB_IV_SRC_SA;
+ params.IV_p = iv;
+ }
+
+ if (params.CryptoMode == SAB_CRYPTO_MODE_CTR)
+ params.Nonce_p = (uint8_t *) &ctx->nonce;
+
+ params.AuthAlgo = aead_hash_match(ctx->hash_alg);
+ params.AuthKey1_p = (uint8_t *) ctx->ipad;
+ params.AuthKey2_p = (uint8_t *) ctx->opad;
+
+ ProtocolParams.ICVByteCount = digestsize;
+
+ rc = SABuilder_GetSizes(¶ms, &SAWords, NULL, NULL);
+ if (rc) {
+ CRYPTO_ERR("SA not created because of size errors: %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_TRANSFORM;
+ DMAProperties.Size = MAX(4*SAWords, 256);
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &SAHostAddress, &SAHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of SA failed: %d\n", DMAStatus);
+ goto error_remove_sg;
+ }
+
+ rc = SABuilder_BuildSA(¶ms, (u32 *)SAHostAddress.p, NULL, NULL);
+ if (rc) {
+ CRYPTO_ERR("SA not created because of errors: %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ /* Check dst buffer has enough size */
+ mtk_req->nr_src = sg_nents_for_len(src, totlen_src);
+ mtk_req->nr_dst = sg_nents_for_len(dst, totlen_dst);
+
+ if (src == dst) {
+ mtk_req->nr_src = max(mtk_req->nr_src, mtk_req->nr_dst);
+ mtk_req->nr_dst = mtk_req->nr_src;
+ if (unlikely((totlen_src || totlen_dst) && (mtk_req->nr_src <= 0))) {
+ CRYPTO_ERR("In-place buffer not large enough\n");
+ return -EINVAL;
+ }
+ dma_map_sg(crypto_dev, src, mtk_req->nr_src, DMA_BIDIRECTIONAL);
+ } else {
+ if (unlikely(totlen_src && (mtk_req->nr_src <= 0))) {
+ CRYPTO_ERR("Source buffer not large enough\n");
+ return -EINVAL;
+ }
+ dma_map_sg(crypto_dev, src, mtk_req->nr_src, DMA_TO_DEVICE);
+
+ if (unlikely(totlen_dst && (mtk_req->nr_dst <= 0))) {
+ CRYPTO_ERR("Dest buffer not large enough\n");
+ dma_unmap_sg(crypto_dev, src, mtk_req->nr_src, DMA_TO_DEVICE);
+ return -EINVAL;
+ }
+ dma_map_sg(crypto_dev, dst, mtk_req->nr_dst, DMA_FROM_DEVICE);
+ }
+
+ if (params.CryptoMode == SAB_CRYPTO_MODE_CCM ||
+ (params.CryptoMode == SAB_CRYPTO_MODE_GCM &&
+ ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP)) {
+
+ aad = kmalloc(assoclen, GFP_KERNEL);
+ if (!aad)
+ goto error_remove_sg;
+ sg_copy_to_buffer(src, mtk_req->nr_src, aad, assoclen);
+ src_pkt -= assoclen;
+ pass_assoc = assoclen;
+ }
+
+ /* Assign sg list */
+ rc = PEC_SGList_Create(MAX(mtk_req->nr_src, 1), &SrcSGListHandle);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_SGList_Create src failed with rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ pass_id = 0;
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_PACKET;
+ for_each_sg(src, sg, mtk_req->nr_src, i) {
+ int len = sg_dma_len(sg);
+ DMABuf_Handle_t sg_handle;
+ DMABuf_HostAddress_t host;
+
+ if (totlen_src < len)
+ len = totlen_src;
+
+ if (pass_assoc) {
+ if (pass_assoc >= len) {
+ pass_assoc -= len;
+ pass_id++;
+ continue;
+ }
+ DMAProperties.Size = MAX(len - pass_assoc, 1);
+ rc = DMABuf_Particle_Alloc(DMAProperties, sg_dma_address(sg) + pass_assoc,
+ &host, &sg_handle);
+ if (rc != DMABUF_STATUS_OK) {
+ CRYPTO_ERR("DMABuf_Particle_Alloc failed rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+ rc = PEC_SGList_Write(SrcSGListHandle, i - pass_id, sg_handle,
+ len - pass_assoc);
+ if (rc != PEC_STATUS_OK)
+ pr_notice("PEC_SGList_Write failed rc = %d\n", rc);
+ pass_assoc = 0;
+ } else {
+ DMAProperties.Size = MAX(len, 1);
+ rc = DMABuf_Particle_Alloc(DMAProperties, sg_dma_address(sg),
+ &host, &sg_handle);
+ if (rc != DMABUF_STATUS_OK) {
+ CRYPTO_ERR("DMABuf_Particle_Alloc failed rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ rc = PEC_SGList_Write(SrcSGListHandle, i - pass_id, sg_handle, len);
+ if (rc != PEC_STATUS_OK)
+ pr_notice("PEC_SGList_Write failed rc = %d\n", rc);
+ }
+
+ totlen_src -= len;
+ if (!totlen_src)
+ break;
+ }
+
+ /* Alloc sg list for result */
+ rc = PEC_SGList_Create(MAX(mtk_req->nr_dst, 1), &DstSGListHandle);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_SGList_Create dst failed with rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ for_each_sg(dst, sg, mtk_req->nr_dst, i) {
+ int len = sg_dma_len(sg);
+ DMABuf_Handle_t sg_handle;
+ DMABuf_HostAddress_t host;
+
+ if (len > totlen_dst)
+ len = totlen_dst;
+
+ DMAProperties.Size = MAX(len, 1);
+ rc = DMABuf_Particle_Alloc(DMAProperties, sg_dma_address(sg), &host, &sg_handle);
+ if (rc != DMABUF_STATUS_OK) {
+ CRYPTO_ERR("DMABuf_Particle_Alloc failed rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+ rc = PEC_SGList_Write(DstSGListHandle, i, sg_handle, len);
+ if (rc != PEC_STATUS_OK)
+ pr_notice("PEC_SGList_Write failed rc = %d\n", rc);
+
+ if (unlikely(!len))
+ break;
+ totlen_dst -= len;
+ }
+
+ /* Build Token */
+ rc = TokenBuilder_GetContextSize(¶ms, &TCRWords);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_GetContextSize returned errors: %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ TCRData = kmalloc(4 * TCRWords, GFP_KERNEL);
+ if (!TCRData) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of TCR failed\n");
+ goto error_remove_sg;
+ }
+
+ rc = TokenBuilder_BuildContext(¶ms, TCRData);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_BuildContext failed: %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ rc = TokenBuilder_GetSize(TCRData, &TokenMaxWords);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_GetSize failed: %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_TOKEN;
+ DMAProperties.Size = 4*TokenMaxWords;
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &TokenHostAddress, &TokenHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of token builder failed: %d\n", DMAStatus);
+ goto error_remove_sg;
+ }
+
+ rc = PEC_SA_Register(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_SA_Register failed: %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ ZEROINIT(TokenParams);
+
+ if (params.CryptoMode == SAB_CRYPTO_MODE_GCM || params.CryptoMode == SAB_CRYPTO_MODE_GMAC)
+ TokenParams.IV_p = gcm_iv;
+
+ if ((params.CryptoMode == SAB_CRYPTO_MODE_GCM && ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) ||
+ params.CryptoMode == SAB_CRYPTO_MODE_CCM) {
+ TokenParams.AdditionalValue = assoclen - ivsize;
+ TokenParams.AAD_p = aad;
+ } else if (params.CryptoMode != SAB_CRYPTO_MODE_GMAC)
+ TokenParams.AdditionalValue = assoclen;
+
+ PktHostAddress.p = kmalloc(sizeof(uint8_t), GFP_KERNEL);
+ rc = TokenBuilder_BuildToken(TCRData, (uint8_t *)PktHostAddress.p, src_pkt,
+ &TokenParams, (uint32_t *)TokenHostAddress.p,
+ &TokenWords, &TokenHeaderWord);
+ kfree(PktHostAddress.p);
+ if (rc != TKB_STATUS_OK) {
+ CRYPTO_ERR("Token builder failed: %d\n", rc);
+ goto error_exit_unregister;
+ }
+
+ ZEROINIT(Cmd);
+ Cmd.Token_Handle = TokenHandle;
+ Cmd.Token_WordCount = TokenWords;
+ Cmd.SrcPkt_Handle = SrcSGListHandle;
+ Cmd.SrcPkt_ByteCount = src_pkt;
+ Cmd.DstPkt_Handle = DstSGListHandle;
+ Cmd.SA_Handle1 = SAHandle;
+ Cmd.SA_Handle2 = DMABuf_NULLHandle;
+
+#if defined(CRYPTO_IOTOKEN_EXT)
+ InTokenDscrExt.HW_Services = IOTOKEN_CMD_PKT_LAC;
+#endif
+ InTokenDscr.TknHdrWordInit = TokenHeaderWord;
+
+ if (!crypto_iotoken_create(&InTokenDscr,
+ InTokenDscrExt_p,
+ InputToken,
+ &Cmd)) {
+ rc = 1;
+ goto error_exit_unregister;
+ }
+
+ rc = PEC_Packet_Put(PEC_INTERFACE_ID, &Cmd, 1, &count);
+ if (rc != PEC_STATUS_OK && count != 1)
+ goto error_exit_unregister;
+
+ result = kmalloc(sizeof(struct mtk_crypto_result), GFP_KERNEL);
+ if (!result) {
+ rc = 1;
+ CRYPTO_ERR("No memory for result\n");
+ goto error_exit_unregister;
+ }
+ INIT_LIST_HEAD(&result->list);
+ result->eip.sa = SAHandle.p;
+ result->eip.token = TokenHandle.p;
+ result->eip.token_context = TCRData;
+ result->eip.pkt_handle = SrcSGListHandle.p;
+ result->async = async;
+ result->dst = DstSGListHandle.p;
+
+ spin_lock_bh(&add_lock);
+ list_add_tail(&result->list, &result_list);
+ spin_unlock_bh(&add_lock);
+ CBFunc = mtk_crypto_interrupt_handler;
+ rc = PEC_ResultNotify_Request(PEC_INTERFACE_ID, CBFunc, 1);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_ResultNotify_Request failed with rc = %d\n", rc);
+ goto error_exit_unregister;
+ }
+
+ return rc;
+
+error_exit_unregister:
+ PEC_SA_UnRegister(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+error_remove_sg:
+ if (src == dst) {
+ dma_unmap_sg(crypto_dev, src, mtk_req->nr_src, DMA_BIDIRECTIONAL);
+ } else {
+ dma_unmap_sg(crypto_dev, src, mtk_req->nr_src, DMA_TO_DEVICE);
+ dma_unmap_sg(crypto_dev, dst, mtk_req->nr_dst, DMA_FROM_DEVICE);
+ }
+
+ if (aad != NULL)
+ kfree(aad);
+
+ crypto_free_sglist(SrcSGListHandle.p);
+ crypto_free_sglist(DstSGListHandle.p);
+
+error_exit:
+ DMABuf_Release(SAHandle);
+ DMABuf_Release(TokenHandle);
+
+ if (TCRData != NULL)
+ kfree(TCRData);
+
+ return rc;
+}
+
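+/*
+ * Submit one skcipher request in look-aside mode: build the SA and token,
+ * map the src/dst scatterlists into PEC scatter-gather lists and queue the
+ * packet. The result is delivered asynchronously through
+ * mtk_crypto_interrupt_handler().
+ */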
+int crypto_basic_cipher(struct crypto_async_request *async, struct mtk_crypto_cipher_req *mtk_req,
+ struct scatterlist *src, struct scatterlist *dst, unsigned int cryptlen,
+ unsigned int assoclen, unsigned int digestsize, u8 *iv, unsigned int ivsize)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(async->tfm);
+ struct skcipher_request *areq = skcipher_request_cast(async);
+ struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
+ struct mtk_crypto_result *result;
+ struct scatterlist *sg;
+ unsigned int totlen_src = cryptlen + assoclen;
+ unsigned int totlen_dst = totlen_src;
+ unsigned int blksize = crypto_skcipher_blocksize(skcipher);
+ int rc;
+ int i;
+ SABuilder_Params_t params;
+ SABuilder_Params_Basic_t ProtocolParams;
+ unsigned int SAWords = 0;
+
+ DMABuf_Status_t DMAStatus;
+ DMABuf_Properties_t DMAProperties = {0, 0, 0, 0};
+ DMABuf_HostAddress_t SAHostAddress;
+ DMABuf_HostAddress_t TokenHostAddress;
+ DMABuf_HostAddress_t PktHostAddress;
+
+ DMABuf_Handle_t SAHandle = {0};
+ DMABuf_Handle_t TokenHandle = {0};
+ DMABuf_Handle_t SrcSGListHandle = {0};
+ DMABuf_Handle_t DstSGListHandle = {0};
+
+ unsigned int TCRWords = 0;
+ void *TCRData = 0;
+ unsigned int TokenWords = 0;
+ unsigned int TokenHeaderWord;
+ unsigned int TokenMaxWords = 0;
+
+ TokenBuilder_Params_t TokenParams;
+ PEC_CommandDescriptor_t Cmd;
+ unsigned int count;
+
+ IOToken_Input_Dscr_t InTokenDscr;
+ IOToken_Output_Dscr_t OutTokenDscr;
+ uint32_t InputToken[IOTOKEN_IN_WORD_COUNT];
+ void *InTokenDscrExt_p = NULL;
+ PEC_NotifyFunction_t CBFunc;
+
+#ifdef CRYPTO_IOTOKEN_EXT
+ IOToken_Input_Dscr_Ext_t InTokenDscrExt;
+
+ ZEROINIT(InTokenDscrExt);
+ InTokenDscrExt_p = &InTokenDscrExt;
+#endif
+ ZEROINIT(InTokenDscr);
+ ZEROINIT(OutTokenDscr);
+
+ /* If the data is not aligned with block size, return invalid */
+ if (!IS_ALIGNED(cryptlen, blksize))
+ return -EINVAL;
+
+ /* Init SA */
+ if (mtk_req->direction == MTK_CRYPTO_ENCRYPT)
+ rc = SABuilder_Init_Basic(¶ms, &ProtocolParams, SAB_DIRECTION_OUTBOUND);
+ else
+ rc = SABuilder_Init_Basic(¶ms, &ProtocolParams, SAB_DIRECTION_INBOUND);
+ if (rc) {
+ CRYPTO_ERR("SABuilder_Init_Basic failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ /* Build SA */
+ params.CryptoAlgo = lookaside_match_alg_name(ctx->alg);
+ params.CryptoMode = lookaside_match_alg_mode(ctx->mode);
+ params.KeyByteCount = ctx->key_len;
+ params.Key_p = (uint8_t *) ctx->key;
+ params.IVSrc = SAB_IV_SRC_SA;
+ if (params.CryptoMode == SAB_CRYPTO_MODE_CTR)
+ params.Nonce_p = (uint8_t *) &ctx->nonce;
+ params.IV_p = iv;
+
+ rc = SABuilder_GetSizes(¶ms, &SAWords, NULL, NULL);
+ if (rc) {
+ CRYPTO_ERR("SA not created because of size errors: %d\n", rc);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_TRANSFORM;
+ DMAProperties.Size = MAX(4*SAWords, 256);
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &SAHostAddress, &SAHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of SA failed: %d\n", DMAStatus);
+ goto error_exit;
+ }
+
+ rc = SABuilder_BuildSA(¶ms, (u32 *)SAHostAddress.p, NULL, NULL);
+ if (rc) {
+ CRYPTO_ERR("SA not created because of errors: %d\n", rc);
+ goto error_exit;
+ }
+
+ /* Build Token */
+ rc = TokenBuilder_GetContextSize(¶ms, &TCRWords);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_GetContextSize returned errors: %d\n", rc);
+ goto error_exit;
+ }
+
+ TCRData = kmalloc(4 * TCRWords, GFP_KERNEL);
+ if (!TCRData) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of TCR failed\n");
+ goto error_exit;
+ }
+
+ rc = TokenBuilder_BuildContext(¶ms, TCRData);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_BuildContext failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ rc = TokenBuilder_GetSize(TCRData, &TokenMaxWords);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_GetSize failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_TOKEN;
+ DMAProperties.Size = 4*TokenMaxWords;
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &TokenHostAddress, &TokenHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of token builder failed: %d\n", DMAStatus);
+ goto error_exit;
+ }
+
+ rc = PEC_SA_Register(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_SA_Register failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ /* Check buffer has enough size for output */
+ mtk_req->nr_src = sg_nents_for_len(src, totlen_src);
+ mtk_req->nr_dst = sg_nents_for_len(dst, totlen_dst);
+
+ if (src == dst) {
+ mtk_req->nr_src = max(mtk_req->nr_src, mtk_req->nr_dst);
+ mtk_req->nr_dst = mtk_req->nr_src;
+ if (unlikely((totlen_src || totlen_dst) && (mtk_req->nr_src <= 0))) {
+ CRYPTO_ERR("In-place buffer not large enough\n");
+ return -EINVAL;
+ }
+ dma_map_sg(crypto_dev, src, mtk_req->nr_src, DMA_BIDIRECTIONAL);
+ } else {
+ if (unlikely(totlen_src && (mtk_req->nr_src <= 0))) {
+ CRYPTO_ERR("Source buffer not large enough\n");
+ return -EINVAL;
+ }
+ dma_map_sg(crypto_dev, src, mtk_req->nr_src, DMA_TO_DEVICE);
+
+ if (unlikely(totlen_dst && (mtk_req->nr_dst <= 0))) {
+ CRYPTO_ERR("Dest buffer not large enough\n");
+ dma_unmap_sg(crypto_dev, src, mtk_req->nr_src, DMA_TO_DEVICE);
+ return -EINVAL;
+ }
+ dma_map_sg(crypto_dev, dst, mtk_req->nr_dst, DMA_FROM_DEVICE);
+ }
+
+ rc = PEC_SGList_Create(MAX(mtk_req->nr_src, 1), &SrcSGListHandle);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_SGList_Create src failed with rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_PACKET;
+ for_each_sg(src, sg, mtk_req->nr_src, i) {
+ int len = sg_dma_len(sg);
+ DMABuf_Handle_t sg_handle;
+ DMABuf_HostAddress_t host;
+
+ if (totlen_src < len)
+ len = totlen_src;
+
+ DMAProperties.Size = MAX(len, 1);
+ rc = DMABuf_Particle_Alloc(DMAProperties, sg_dma_address(sg), &host, &sg_handle);
+ if (rc != DMABUF_STATUS_OK) {
+ CRYPTO_ERR("DMABuf_Particle_Alloc failed rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+ rc = PEC_SGList_Write(SrcSGListHandle, i, sg_handle, len);
+ if (rc != PEC_STATUS_OK)
+ pr_notice("PEC_SGList_Write failed rc = %d\n", rc);
+
+ totlen_src -= len;
+ if (!totlen_src)
+ break;
+ }
+
+ rc = PEC_SGList_Create(MAX(mtk_req->nr_dst, 1), &DstSGListHandle);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_SGList_Create dst failed with rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ for_each_sg(dst, sg, mtk_req->nr_dst, i) {
+ int len = sg_dma_len(sg);
+ DMABuf_Handle_t sg_handle;
+ DMABuf_HostAddress_t host;
+
+ if (len > totlen_dst)
+ len = totlen_dst;
+
+ DMAProperties.Size = MAX(len, 1);
+ rc = DMABuf_Particle_Alloc(DMAProperties, sg_dma_address(sg), &host, &sg_handle);
+ if (rc != DMABUF_STATUS_OK) {
+ CRYPTO_ERR("DMABuf_Particle_Alloc failed rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+ rc = PEC_SGList_Write(DstSGListHandle, i, sg_handle, len);
+
+ if (unlikely(!len))
+ break;
+ totlen_dst -= len;
+ }
+
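+ /* For CBC decryption, keep the last ciphertext block as the next IV */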
+ if (params.CryptoMode == SAB_CRYPTO_MODE_CBC &&
+ mtk_req->direction == MTK_CRYPTO_DECRYPT)
+ sg_pcopy_to_buffer(src, mtk_req->nr_src, iv, ivsize, cryptlen - ivsize);
+
+ PktHostAddress.p = kmalloc(sizeof(uint8_t), GFP_KERNEL);
+ ZEROINIT(TokenParams);
+ rc = TokenBuilder_BuildToken(TCRData, (uint8_t *)PktHostAddress.p, cryptlen,
+ &TokenParams, (uint32_t *)TokenHostAddress.p,
+ &TokenWords, &TokenHeaderWord);
+ kfree(PktHostAddress.p);
+ if (rc != TKB_STATUS_OK) {
+ CRYPTO_ERR("Token builder failed: %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ ZEROINIT(Cmd);
+ Cmd.Token_Handle = TokenHandle;
+ Cmd.Token_WordCount = TokenWords;
+ Cmd.SrcPkt_Handle = SrcSGListHandle;
+ Cmd.SrcPkt_ByteCount = cryptlen;
+ Cmd.DstPkt_Handle = DstSGListHandle;
+ Cmd.SA_Handle1 = SAHandle;
+ Cmd.SA_Handle2 = DMABuf_NULLHandle;
+
+#if defined(CRYPTO_IOTOKEN_EXT)
+ InTokenDscrExt.HW_Services = IOTOKEN_CMD_PKT_LAC;
+#endif
+ InTokenDscr.TknHdrWordInit = TokenHeaderWord;
+
+ if (!crypto_iotoken_create(&InTokenDscr,
+ InTokenDscrExt_p,
+ InputToken,
+ &Cmd)) {
+ rc = 1;
+ goto error_remove_sg;
+ }
+
+ rc = PEC_Packet_Put(PEC_INTERFACE_ID, &Cmd, 1, &count);
+ if (rc != PEC_STATUS_OK && count != 1) {
+ rc = 1;
+ CRYPTO_ERR("PEC_Packet_Put error: %d\n", rc);
+ goto error_remove_sg;
+ }
+
+ result = kmalloc(sizeof(struct mtk_crypto_result), GFP_KERNEL);
+ if (!result) {
+ rc = 1;
+ CRYPTO_ERR("No memory for result\n");
+ goto error_remove_sg;
+ }
+ INIT_LIST_HEAD(&result->list);
+ result->eip.sa = SAHandle.p;
+ result->eip.token = TokenHandle.p;
+ result->eip.token_context = TCRData;
+ result->eip.pkt_handle = SrcSGListHandle.p;
+ result->async = async;
+ result->dst = DstSGListHandle.p;
+
+ spin_lock_bh(&add_lock);
+ list_add_tail(&result->list, &result_list);
+ spin_unlock_bh(&add_lock);
+ CBFunc = mtk_crypto_interrupt_handler;
+ rc = PEC_ResultNotify_Request(PEC_INTERFACE_ID, CBFunc, 1);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_ResultNotify_Request failed with rc = %d\n", rc);
+ goto error_remove_sg;
+ }
+ return 0;
+
+error_remove_sg:
+ if (src == dst) {
+ dma_unmap_sg(crypto_dev, src, mtk_req->nr_src, DMA_BIDIRECTIONAL);
+ } else {
+ dma_unmap_sg(crypto_dev, src, mtk_req->nr_src, DMA_TO_DEVICE);
+ dma_unmap_sg(crypto_dev, dst, mtk_req->nr_dst, DMA_FROM_DEVICE);
+ }
+
+ crypto_free_sglist(SrcSGListHandle.p);
+ crypto_free_sglist(DstSGListHandle.p);
+
+ PEC_SA_UnRegister(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+
+error_exit:
+ DMABuf_Release(SAHandle);
+ DMABuf_Release(TokenHandle);
+
+ if (TCRData != NULL)
+ kfree(TCRData);
+
+ return rc;
+}
+
+SABuilder_Auth_t lookaside_match_hash(enum mtk_crypto_alg alg)
+{
+ switch (alg) {
+ case MTK_CRYPTO_ALG_SHA1:
+ return SAB_AUTH_HASH_SHA1;
+ case MTK_CRYPTO_ALG_SHA224:
+ return SAB_AUTH_HASH_SHA2_224;
+ case MTK_CRYPTO_ALG_SHA256:
+ return SAB_AUTH_HASH_SHA2_256;
+ case MTK_CRYPTO_ALG_SHA384:
+ return SAB_AUTH_HASH_SHA2_384;
+ case MTK_CRYPTO_ALG_SHA512:
+ return SAB_AUTH_HASH_SHA2_512;
+ case MTK_CRYPTO_ALG_MD5:
+ return SAB_AUTH_HASH_MD5;
+ case MTK_CRYPTO_ALG_XCBC:
+ return SAB_AUTH_AES_XCBC_MAC;
+ case MTK_CRYPTO_ALG_CMAC_128:
+ return SAB_AUTH_AES_CMAC_128;
+ case MTK_CRYPTO_ALG_CMAC_192:
+ return SAB_AUTH_AES_CMAC_192;
+ case MTK_CRYPTO_ALG_CMAC_256:
+ return SAB_AUTH_AES_CMAC_256;
+ default:
+ return SAB_AUTH_NULL;
+ }
+}
+
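+/*
+ * Submit a follow-up hash update (or final) in look-aside mode, reusing the
+ * SA and token context created by crypto_first_ahash_req(). The token is
+ * built with TKB_PACKET_FLAG_HASHAPPEND and, when @finish is set, also
+ * TKB_PACKET_FLAG_HASHFINAL.
+ */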
+int crypto_ahash_token_req(struct crypto_async_request *async, struct mtk_crypto_ahash_req *mtk_req,
+ uint8_t *Input_p, unsigned int InputByteCount, bool finish)
+{
+ struct mtk_crypto_result *result;
+
+ DMABuf_Properties_t DMAProperties = {0, 0, 0, 0};
+ DMABuf_HostAddress_t TokenHostAddress;
+ DMABuf_HostAddress_t PktHostAddress;
+ DMABuf_Status_t DMAStatus;
+
+ DMABuf_Handle_t TokenHandle = {0};
+ DMABuf_Handle_t PktHandle = {0};
+ DMABuf_Handle_t SAHandle = {0};
+
+ unsigned int TokenMaxWords = 0;
+ unsigned int TokenHeaderWord;
+ unsigned int TokenWords = 0;
+ void *TCRData = 0;
+
+ TokenBuilder_Params_t TokenParams;
+ PEC_CommandDescriptor_t Cmd;
+ PEC_NotifyFunction_t CBFunc;
+
+ unsigned int count;
+ int rc;
+
+ u32 InputToken[IOTOKEN_IN_WORD_COUNT];
+ IOToken_Output_Dscr_t OutTokenDscr;
+ IOToken_Input_Dscr_t InTokenDscr;
+ void *InTokenDscrExt_p = NULL;
+
+#ifdef CRYPTO_IOTOKEN_EXT
+ IOToken_Input_Dscr_Ext_t InTokenDscrExt;
+
+ ZEROINIT(InTokenDscrExt);
+ InTokenDscrExt_p = &InTokenDscrExt;
+#endif
+ ZEROINIT(InTokenDscr);
+ ZEROINIT(OutTokenDscr);
+
+ TCRData = mtk_req->token_context;
+ rc = TokenBuilder_GetSize(TCRData, &TokenMaxWords);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_GetSize failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_TOKEN;
+ DMAProperties.Size = 4*TokenMaxWords;
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &TokenHostAddress, &TokenHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of token builder failed: %d\n", DMAStatus);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_PACKET;
+ DMAProperties.Size = MAX(InputByteCount, mtk_req->digest_sz);
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &PktHostAddress, &PktHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of source packet buffer failed: %d\n",
+ DMAStatus);
+ goto error_exit;
+ }
+ memcpy(PktHostAddress.p, Input_p, InputByteCount);
+
+ ZEROINIT(TokenParams);
+ TokenParams.PacketFlags |= TKB_PACKET_FLAG_HASHAPPEND;
+ if (finish)
+ TokenParams.PacketFlags |= TKB_PACKET_FLAG_HASHFINAL;
+
+ rc = TokenBuilder_BuildToken(TCRData, (u8 *) PktHostAddress.p,
+ InputByteCount, &TokenParams,
+ (u32 *) TokenHostAddress.p,
+ &TokenWords, &TokenHeaderWord);
+ if (rc != TKB_STATUS_OK) {
+ CRYPTO_ERR("Token builder failed: %d\n", rc);
+ goto error_exit_unregister;
+ }
+
+ SAHandle.p = mtk_req->sa_pointer;
+ ZEROINIT(Cmd);
+ Cmd.Token_Handle = TokenHandle;
+ Cmd.Token_WordCount = TokenWords;
+ Cmd.SrcPkt_Handle = PktHandle;
+ Cmd.SrcPkt_ByteCount = InputByteCount;
+ Cmd.DstPkt_Handle = PktHandle;
+ Cmd.SA_Handle1 = SAHandle;
+ Cmd.SA_Handle2 = DMABuf_NULLHandle;
+
+#if defined(CRYPTO_IOTOKEN_EXT)
+ InTokenDscrExt.HW_Services = IOTOKEN_CMD_PKT_LAC;
+#endif
+ InTokenDscr.TknHdrWordInit = TokenHeaderWord;
+
+ if (!crypto_iotoken_create(&InTokenDscr,
+ InTokenDscrExt_p,
+ InputToken,
+ &Cmd)) {
+ rc = 1;
+ goto error_exit_unregister;
+ }
+
+ rc = PEC_Packet_Put(PEC_INTERFACE_ID, &Cmd, 1, &count);
+ if (rc != PEC_STATUS_OK && count != 1) {
+ rc = 1;
+ CRYPTO_ERR("PEC_Packet_Put error: %d\n", rc);
+ goto error_exit_unregister;
+ }
+
+ result = kmalloc(sizeof(struct mtk_crypto_result), GFP_KERNEL);
+ if (!result) {
+ rc = 1;
+ CRYPTO_ERR("No memory for result\n");
+ goto error_exit_unregister;
+ }
+ INIT_LIST_HEAD(&result->list);
+ result->eip.token = TokenHandle.p;
+ result->eip.pkt_handle = PktHandle.p;
+ result->async = async;
+ result->dst = PktHostAddress.p;
+
+ spin_lock_bh(&add_lock);
+ list_add_tail(&result->list, &result_list);
+ spin_unlock_bh(&add_lock);
+ CBFunc = mtk_crypto_interrupt_handler;
+ rc = PEC_ResultNotify_Request(PEC_INTERFACE_ID, CBFunc, 1);
+
+ return rc;
+
+error_exit_unregister:
+ PEC_SA_UnRegister(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+
+error_exit:
+ DMABuf_Release(SAHandle);
+ DMABuf_Release(TokenHandle);
+ DMABuf_Release(PktHandle);
+
+ if (TCRData != NULL)
+ kfree(TCRData);
+
+ return rc;
+}
+
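+/*
+ * Helper for AES-XCBC/CMAC updates: encrypt the input with AES-CBC using
+ * the derived key held in ctx->ipad and the current intermediate state as
+ * IV, then hand the ciphertext back through the result path.
+ */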
+int crypto_ahash_aes_cbc(struct crypto_async_request *async, struct mtk_crypto_ahash_req *mtk_req,
+ uint8_t *Input_p, unsigned int InputByteCount)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(async->tfm);
+ struct mtk_crypto_result *result;
+ SABuilder_Params_Basic_t ProtocolParams;
+ SABuilder_Params_t params;
+ unsigned int SAWords = 0;
+ int rc;
+
+ DMABuf_Properties_t DMAProperties = {0, 0, 0, 0};
+ DMABuf_HostAddress_t TokenHostAddress;
+ DMABuf_HostAddress_t PktHostAddress;
+ DMABuf_HostAddress_t SAHostAddress;
+ DMABuf_Status_t DMAStatus;
+
+ DMABuf_Handle_t TokenHandle = {0};
+ DMABuf_Handle_t PktHandle = {0};
+ DMABuf_Handle_t SAHandle = {0};
+
+ unsigned int TokenMaxWords = 0;
+ unsigned int TokenHeaderWord;
+ unsigned int TokenWords = 0;
+ unsigned int TCRWords = 0;
+ void *TCRData = 0;
+
+ TokenBuilder_Params_t TokenParams;
+ PEC_CommandDescriptor_t Cmd;
+ PEC_NotifyFunction_t CBFunc;
+
+ unsigned int count;
+ int i;
+
+ u32 InputToken[IOTOKEN_IN_WORD_COUNT];
+ IOToken_Output_Dscr_t OutTokenDscr;
+ IOToken_Input_Dscr_t InTokenDscr;
+ void *InTokenDscrExt_p = NULL;
+
+#ifdef CRYPTO_IOTOKEN_EXT
+ IOToken_Input_Dscr_Ext_t InTokenDscrExt;
+
+ ZEROINIT(InTokenDscrExt);
+ InTokenDscrExt_p = &InTokenDscrExt;
+#endif
+ ZEROINIT(InTokenDscr);
+ ZEROINIT(OutTokenDscr);
+
+ if (!IS_ALIGNED(InputByteCount, 16)) {
+ pr_notice("not aligned: %d\n", InputByteCount);
+ return -EINVAL;
+ }
+ rc = SABuilder_Init_Basic(¶ms, &ProtocolParams, SAB_DIRECTION_OUTBOUND);
+ if (rc) {
+ CRYPTO_ERR("SABuilder_Init_Basic failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ params.CryptoAlgo = SAB_CRYPTO_AES;
+ params.CryptoMode = SAB_CRYPTO_MODE_CBC;
+ params.KeyByteCount = ctx->key_sz - 2 * AES_BLOCK_SIZE;
+ params.Key_p = (uint8_t *) ctx->ipad + 2 * AES_BLOCK_SIZE;
+ params.IVSrc = SAB_IV_SRC_SA;
+ params.IV_p = (uint8_t *) mtk_req->state;
+
+ if (ctx->alg == MTK_CRYPTO_ALG_XCBC) {
+ for (i = 0; i < params.KeyByteCount; i = i + 4) {
+ swap(params.Key_p[i], params.Key_p[i+3]);
+ swap(params.Key_p[i+1], params.Key_p[i+2]);
+ }
+ }
+
+ rc = SABuilder_GetSizes(¶ms, &SAWords, NULL, NULL);
+ if (rc) {
+ CRYPTO_ERR("SA not created because of size errors: %d\n", rc);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_TRANSFORM;
+ DMAProperties.Size = MAX(4*SAWords, 256);
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &SAHostAddress, &SAHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of SA failed: %d\n", DMAStatus);
+ goto error_exit;
+ }
+
+ rc = SABuilder_BuildSA(¶ms, (u32 *)SAHostAddress.p, NULL, NULL);
+ if (rc) {
+ CRYPTO_ERR("SA not created because of errors: %d\n", rc);
+ goto error_exit;
+ }
+
+ rc = TokenBuilder_GetContextSize(¶ms, &TCRWords);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_GetContextSize returned errors: %d\n", rc);
+ goto error_exit;
+ }
+
+ TCRData = kmalloc(4 * TCRWords, GFP_KERNEL);
+ if (!TCRData) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of TCR failed\n");
+ goto error_exit;
+ }
+
+ rc = TokenBuilder_BuildContext(¶ms, TCRData);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_BuildContext failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ rc = TokenBuilder_GetSize(TCRData, &TokenMaxWords);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_GetSize failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_TOKEN;
+ DMAProperties.Size = 4*TokenMaxWords;
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &TokenHostAddress, &TokenHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of token builder failed: %d\n", DMAStatus);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_PACKET;
+ DMAProperties.Size = MAX(InputByteCount, 1);
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &PktHostAddress, &PktHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of source packet buffer failed: %d\n", DMAStatus);
+ goto error_exit;
+ }
+
+ rc = PEC_SA_Register(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_SA_Register failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ memcpy(PktHostAddress.p, Input_p, InputByteCount);
+
+ ZEROINIT(TokenParams);
+ rc = TokenBuilder_BuildToken(TCRData, (uint8_t *) PktHostAddress.p, InputByteCount,
+ &TokenParams, (uint32_t *) TokenHostAddress.p,
+ &TokenWords, &TokenHeaderWord);
+ if (rc != TKB_STATUS_OK) {
+ CRYPTO_ERR("Token builder failed: %d\n", rc);
+ goto error_exit_unregister;
+ }
+
+ ZEROINIT(Cmd);
+ Cmd.Token_Handle = TokenHandle;
+ Cmd.Token_WordCount = TokenWords;
+ Cmd.SrcPkt_Handle = PktHandle;
+ Cmd.SrcPkt_ByteCount = InputByteCount;
+ Cmd.DstPkt_Handle = PktHandle;
+ Cmd.SA_Handle1 = SAHandle;
+ Cmd.SA_Handle2 = DMABuf_NULLHandle;
+
+#if defined(CRYPTO_IOTOKEN_EXT)
+ InTokenDscrExt.HW_Services = IOTOKEN_CMD_PKT_LAC;
+#endif
+ InTokenDscr.TknHdrWordInit = TokenHeaderWord;
+
- dscr_p->InPacket_ByteCount = pec_cmd_dscr->SrcPkt_ByteCount;
- dscr_p->Ext_p = ext_p;
+ if (!crypto_iotoken_create(&InTokenDscr,
+ InTokenDscrExt_p,
+ InputToken,
+ &Cmd)) {
+ rc = 1;
+ goto error_exit_unregister;
+ }
- IOTokenRc = IOToken_Create(dscr_p, data_p);
- if (IOTokenRc < 0) {
- CRYPTO_ERR("IOToken_Create error %d\n", IOTokenRc);
- return false;
+ rc = PEC_Packet_Put(PEC_INTERFACE_ID, &Cmd, 1, &count);
+ if (rc != PEC_STATUS_OK && count != 1) {
+ rc = 1;
+ CRYPTO_ERR("PEC_Packet_Put error: %d\n", rc);
+ goto error_exit_unregister;
}
- pec_cmd_dscr->InputToken_p = data_p;
+ result = kmalloc(sizeof(struct mtk_crypto_result), GFP_KERNEL);
+ if (!result) {
+ rc = 1;
+ CRYPTO_ERR("No memory for result\n");
+ goto error_exit_unregister;
+ }
+ INIT_LIST_HEAD(&result->list);
+ result->eip.sa = SAHandle.p;
+ result->eip.token = TokenHandle.p;
+ result->eip.token_context = TCRData;
+ result->eip.pkt_handle = PktHandle.p;
+ result->async = async;
+ result->dst = PktHostAddress.p;
+ result->size = InputByteCount;
- return true;
+ spin_lock_bh(&add_lock);
+ list_add_tail(&result->list, &result_list);
+ spin_unlock_bh(&add_lock);
+
+ CBFunc = mtk_crypto_interrupt_handler;
+ rc = PEC_ResultNotify_Request(PEC_INTERFACE_ID, CBFunc, 1);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_ResultNotify_Request failed with rc = %d\n", rc);
+ goto error_exit_unregister;
+ }
+ return 0;
+
+error_exit_unregister:
+ PEC_SA_UnRegister(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+
+error_exit:
+ DMABuf_Release(SAHandle);
+ DMABuf_Release(TokenHandle);
+ DMABuf_Release(PktHandle);
+
+ if (TCRData != NULL)
+ kfree(TCRData);
+
+ return rc;
}
-unsigned int crypto_pe_get_one(IOToken_Output_Dscr_t *const OutTokenDscr_p,
- u32 *OutTokenData_p,
- PEC_ResultDescriptor_t *RD_p)
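+/*
+ * Submit the first hash request for a transform: build the SA (saving the
+ * intermediate hash state unless @finish is set) and the token context that
+ * later crypto_ahash_token_req() calls reuse, then queue the packet with
+ * TKB_PACKET_FLAG_HASHFIRST | TKB_PACKET_FLAG_HASHAPPEND.
+ */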
+int crypto_first_ahash_req(struct crypto_async_request *async,
+ struct mtk_crypto_ahash_req *mtk_req, uint8_t *Input_p,
+ unsigned int InputByteCount, bool finish)
{
- int LoopCounter = MTK_EIP197_INLINE_NOF_TRIES;
- int IOToken_Rc;
- PEC_Status_t pecres;
+ struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(async->tfm);
+ struct mtk_crypto_result *result;
+ SABuilder_Params_Basic_t ProtocolParams;
+ SABuilder_Params_t params;
+ unsigned int SAWords = 0;
+ static uint8_t DummyAuthKey[64];
+ int rc;
- ZEROINIT(*OutTokenDscr_p);
- ZEROINIT(*RD_p);
+ DMABuf_Properties_t DMAProperties = {0, 0, 0, 0};
+ DMABuf_HostAddress_t TokenHostAddress;
+ DMABuf_HostAddress_t PktHostAddress;
+ DMABuf_HostAddress_t SAHostAddress;
+ DMABuf_Status_t DMAStatus;
- /* Link data structures */
- RD_p->OutputToken_p = OutTokenData_p;
+ DMABuf_Handle_t TokenHandle = {0};
+ DMABuf_Handle_t PktHandle = {0};
+ DMABuf_Handle_t SAHandle = {0};
- while (LoopCounter > 0) {
- /* Try to get the processed packet from the driver */
- unsigned int Counter = 0;
+ unsigned int TokenMaxWords = 0;
+ unsigned int TokenHeaderWord;
+ unsigned int TokenWords = 0;
+ unsigned int TCRWords = 0;
+ void *TCRData = 0;
- pecres = PEC_Packet_Get(PEC_INTERFACE_ID, RD_p, 1, &Counter);
- if (pecres != PEC_STATUS_OK) {
- /* IO error */
- CRYPTO_ERR("PEC_Packet_Get error %d\n", pecres);
- return 0;
- }
+ TokenBuilder_Params_t TokenParams;
+ PEC_CommandDescriptor_t Cmd;
+ PEC_NotifyFunction_t CBFunc;
- if (Counter) {
- IOToken_Rc = IOToken_Parse(OutTokenData_p, OutTokenDscr_p);
- if (IOToken_Rc < 0) {
- /* IO error */
- CRYPTO_ERR("IOToken_Parse error %d\n", IOToken_Rc);
- return 0;
- }
+ unsigned int count;
+ int i;
- if (OutTokenDscr_p->ErrorCode != 0) {
- /* Packet process error */
- CRYPTO_ERR("Result descriptor error 0x%x\n",
- OutTokenDscr_p->ErrorCode);
- return 0;
- }
+ u32 InputToken[IOTOKEN_IN_WORD_COUNT];
+ IOToken_Output_Dscr_t OutTokenDscr;
+ IOToken_Input_Dscr_t InTokenDscr;
+ void *InTokenDscrExt_p = NULL;
- /* packet received */
- return Counter;
+#ifdef CRYPTO_IOTOKEN_EXT
+ IOToken_Input_Dscr_Ext_t InTokenDscrExt;
+
+ ZEROINIT(InTokenDscrExt);
+ InTokenDscrExt_p = &InTokenDscrExt;
+#endif
+ ZEROINIT(InTokenDscr);
+ ZEROINIT(OutTokenDscr);
+
+ rc = SABuilder_Init_Basic(¶ms, &ProtocolParams, SAB_DIRECTION_OUTBOUND);
+ if (rc) {
+ CRYPTO_ERR("SABuilder_Init_Basic failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ params.IV_p = (uint8_t *) ctx->ipad;
+ params.AuthAlgo = lookaside_match_hash(ctx->alg);
+ params.AuthKey1_p = DummyAuthKey;
+ if (params.AuthAlgo == SAB_AUTH_AES_XCBC_MAC) {
+ params.AuthKey1_p = (uint8_t *) ctx->ipad + 2 * AES_BLOCK_SIZE;
+ params.AuthKey2_p = (uint8_t *) ctx->ipad;
+ params.AuthKey3_p = (uint8_t *) ctx->ipad + AES_BLOCK_SIZE;
+
+ for (i = 0; i < AES_BLOCK_SIZE; i = i + 4) {
+ swap(params.AuthKey1_p[i], params.AuthKey1_p[i+3]);
+ swap(params.AuthKey1_p[i+1], params.AuthKey1_p[i+2]);
+
+ swap(params.AuthKey2_p[i], params.AuthKey2_p[i+3]);
+ swap(params.AuthKey2_p[i+1], params.AuthKey2_p[i+2]);
+
+ swap(params.AuthKey3_p[i], params.AuthKey3_p[i+3]);
+ swap(params.AuthKey3_p[i+1], params.AuthKey3_p[i+2]);
}
+ }
- /* Wait for MTK_EIP197_PKT_GET_TIMEOUT_MS milliseconds */
- udelay(MTK_EIP197_PKT_GET_TIMEOUT_MS * 1000);
- LoopCounter--;
+ if (!finish)
+ params.flags |= SAB_FLAG_HASH_SAVE | SAB_FLAG_HASH_INTERMEDIATE;
+
+ params.flags |= SAB_FLAG_SUPPRESS_PAYLOAD;
+ ProtocolParams.ICVByteCount = mtk_req->digest_sz;
+
+ rc = SABuilder_GetSizes(¶ms, &SAWords, NULL, NULL);
+ if (rc) {
+ CRYPTO_ERR("SA not created because of size errors: %d\n", rc);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_TRANSFORM;
+ DMAProperties.Size = MAX(4*SAWords, 256);
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &SAHostAddress, &SAHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of SA failed: %d\n", DMAStatus);
+ goto error_exit;
}
- CRYPTO_ERR("Timeout when reading packet\n");
+ rc = SABuilder_BuildSA(¶ms, (u32 *)SAHostAddress.p, NULL, NULL);
+ if (rc) {
+ CRYPTO_ERR("SA not created because of errors: %d\n", rc);
+ goto error_exit;
+ }
- /* IO error (timeout, not result packet received) */
- return 0;
-}
+ rc = TokenBuilder_GetContextSize(¶ms, &TCRWords);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_GetContextSize returned errors: %d\n", rc);
+ goto error_exit;
+ }
+
+ TCRData = kmalloc(4 * TCRWords, GFP_KERNEL);
+ if (!TCRData) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of TCR failed\n");
+ goto error_exit;
+ }
+
+ rc = TokenBuilder_BuildContext(¶ms, TCRData);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_BuildContext failed: %d\n", rc);
+ goto error_exit;
+ }
+ mtk_req->token_context = TCRData;
+
+ rc = TokenBuilder_GetSize(TCRData, &TokenMaxWords);
+ if (rc) {
+ CRYPTO_ERR("TokenBuilder_GetSize failed: %d\n", rc);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_TOKEN;
+ DMAProperties.Size = 4*TokenMaxWords;
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &TokenHostAddress, &TokenHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of token builder failed: %d\n", DMAStatus);
+ goto error_exit;
+ }
+
+ DMAProperties.fCached = true;
+ DMAProperties.Alignment = MTK_EIP197_INLINE_DMA_ALIGNMENT_BYTE_COUNT;
+ DMAProperties.Bank = MTK_EIP197_INLINE_BANK_PACKET;
+ DMAProperties.Size = MAX(InputByteCount, mtk_req->digest_sz);
+
+ DMAStatus = DMABuf_Alloc(DMAProperties, &PktHostAddress, &PktHandle);
+ if (DMAStatus != DMABUF_STATUS_OK) {
+ rc = 1;
+ CRYPTO_ERR("Allocation of source packet buffer failed: %d\n",
+ DMAStatus);
+ goto error_exit;
+ }
+ rc = PEC_SA_Register(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+ if (rc != PEC_STATUS_OK) {
+ CRYPTO_ERR("PEC_SA_Register failed: %d\n", rc);
+ goto error_exit;
+ }
+ memcpy(PktHostAddress.p, Input_p, InputByteCount);
+
+ ZEROINIT(TokenParams);
+ TokenParams.PacketFlags |= (TKB_PACKET_FLAG_HASHFIRST
+ | TKB_PACKET_FLAG_HASHAPPEND);
+ if (finish)
+ TokenParams.PacketFlags |= TKB_PACKET_FLAG_HASHFINAL;
+
+ rc = TokenBuilder_BuildToken(TCRData, (u8 *) PktHostAddress.p,
+ InputByteCount, &TokenParams,
+ (u32 *) TokenHostAddress.p,
+ &TokenWords, &TokenHeaderWord);
+ if (rc != TKB_STATUS_OK) {
+ CRYPTO_ERR("Token builder failed: %d\n", rc);
+ goto error_exit_unregister;
+ }
+
+ ZEROINIT(Cmd);
+ Cmd.Token_Handle = TokenHandle;
+ Cmd.Token_WordCount = TokenWords;
+ Cmd.SrcPkt_Handle = PktHandle;
+ Cmd.SrcPkt_ByteCount = InputByteCount;
+ Cmd.DstPkt_Handle = PktHandle;
+ Cmd.SA_Handle1 = SAHandle;
+ Cmd.SA_Handle2 = DMABuf_NULLHandle;
+
+ mtk_req->sa_pointer = SAHandle.p;
+
+#if defined(CRYPTO_IOTOKEN_EXT)
+ InTokenDscrExt.HW_Services = IOTOKEN_CMD_PKT_LAC;
+#endif
+ InTokenDscr.TknHdrWordInit = TokenHeaderWord;
+
+ if (!crypto_iotoken_create(&InTokenDscr,
+ InTokenDscrExt_p,
+ InputToken,
+ &Cmd)) {
+ rc = 1;
+ goto error_exit_unregister;
+ }
+
+ rc = PEC_Packet_Put(PEC_INTERFACE_ID, &Cmd, 1, &count);
+ if (rc != PEC_STATUS_OK && count != 1) {
+ rc = 1;
+ CRYPTO_ERR("PEC_Packet_Put error: %d\n", rc);
+ goto error_exit_unregister;
+ }
+
+ result = kmalloc(sizeof(struct mtk_crypto_result), GFP_KERNEL);
+ if (!result) {
+ rc = 1;
+ CRYPTO_ERR("No memory for result\n");
+ goto error_exit_unregister;
+ }
+ INIT_LIST_HEAD(&result->list);
+ result->eip.token = TokenHandle.p;
+ result->eip.pkt_handle = PktHandle.p;
+ result->async = async;
+ result->dst = PktHostAddress.p;
+
+ spin_lock_bh(&add_lock);
+ list_add_tail(&result->list, &result_list);
+ spin_unlock_bh(&add_lock);
+ CBFunc = mtk_crypto_interrupt_handler;
+ rc = PEC_ResultNotify_Request(PEC_INTERFACE_ID, CBFunc, 1);
+
+ return rc;
+
+error_exit_unregister:
+ PEC_SA_UnRegister(PEC_INTERFACE_ID, SAHandle, DMABuf_NULLHandle,
+ DMABuf_NULLHandle);
+
+error_exit:
+ DMABuf_Release(SAHandle);
+ DMABuf_Release(TokenHandle);
+ DMABuf_Release(PktHandle);
+
+ if (TCRData != NULL)
+ kfree(TCRData);
+
+ return rc;
+}
bool crypto_basic_hash(SABuilder_Auth_t HashAlgo, uint8_t *Input_p,
unsigned int InputByteCount, uint8_t *Output_p,
@@ -265,7 +1854,6 @@
Cmd.SA_Handle1 = SAHandle;
Cmd.SA_Handle2 = DMABuf_NULLHandle;
-
#if defined(CRYPTO_IOTOKEN_EXT)
InTokenDscrExt.HW_Services = IOTOKEN_CMD_PKT_LAC;
#endif
@@ -286,9 +1874,9 @@
goto error_exit_unregister;
}
- if (crypto_pe_get_one(&OutTokenDscr, OutputToken, &Res) < 1) {
+ if (crypto_pe_busy_get_one(&OutTokenDscr, OutputToken, &Res) < 1) {
rc = 1;
- CRYPTO_ERR("error from crypto_pe_get_one\n");
+ CRYPTO_ERR("error from crypto_pe_busy_get_one\n");
goto error_exit_unregister;
}
memcpy(Output_p, PktHostAddress.p, OutputByteCount);
diff --git a/feed/kernel/crypto-eip/src/inc/crypto-eip/crypto-eip.h b/feed/kernel/crypto-eip/src/inc/crypto-eip/crypto-eip.h
index 524e4c6..5d27272 100644
--- a/feed/kernel/crypto-eip/src/inc/crypto-eip/crypto-eip.h
+++ b/feed/kernel/crypto-eip/src/inc/crypto-eip/crypto-eip.h
@@ -19,6 +19,7 @@
struct mtk_crypto;
extern struct mtk_crypto mcrypto;
+extern spinlock_t add_lock;
#define TRANSFORM_RECORD_LEN 64
diff --git a/feed/kernel/crypto-eip/src/inc/crypto-eip/crypto-eip197-inline-ddk.h b/feed/kernel/crypto-eip/src/inc/crypto-eip/crypto-eip197-inline-ddk.h
index 00fb1a8..a5f6b20 100644
--- a/feed/kernel/crypto-eip/src/inc/crypto-eip/crypto-eip197-inline-ddk.h
+++ b/feed/kernel/crypto-eip/src/inc/crypto-eip/crypto-eip197-inline-ddk.h
@@ -25,6 +25,7 @@
#include <crypto-eip/ddk/slad/api_pcl.h>
#include <crypto-eip/ddk/slad/api_pcl_dtl.h>
#include <crypto-eip/ddk/slad/api_pec.h>
+#include <crypto-eip/ddk/slad/api_pec_sg.h>
#include <crypto-eip/ddk/log/log.h>
#ifdef DDK_EIP197_FW33_FEATURES
diff --git a/feed/kernel/crypto-eip/src/inc/crypto-eip/ddk-wrapper.h b/feed/kernel/crypto-eip/src/inc/crypto-eip/ddk-wrapper.h
index db2b84f..dabc743 100644
--- a/feed/kernel/crypto-eip/src/inc/crypto-eip/ddk-wrapper.h
+++ b/feed/kernel/crypto-eip/src/inc/crypto-eip/ddk-wrapper.h
@@ -10,8 +10,37 @@
#define _CRYPTO_EIP_DDK_WRAPPER_H_
#include "crypto-eip.h"
+#include "lookaside.h"
+#include "crypto-eip197-inline-ddk.h"
+void mtk_crypto_interrupt_handler(void);
u32 *mtk_ddk_tr_ipsec_build(struct mtk_xfrm_params *xfrm_params, u32 ipsec_mod);
+int crypto_basic_cipher(struct crypto_async_request *async, struct mtk_crypto_cipher_req *mtk_req,
+ struct scatterlist *src, struct scatterlist *dst, unsigned int cryptlen,
+ unsigned int assoclen, unsigned int digestsize, u8 *iv, unsigned int ivsize);
+int crypto_aead_cipher(struct crypto_async_request *async, struct mtk_crypto_cipher_req *mtk_req,
+ struct scatterlist *src, struct scatterlist *dst, unsigned int cryptlen,
+ unsigned int assoclen, unsigned int digestsize, u8 *iv, unsigned int ivsize);
+int crypto_ahash_token_req(struct crypto_async_request *async,
+ struct mtk_crypto_ahash_req *mtk_req, uint8_t *Input_p,
+ unsigned int InputByteCount, bool finish);
+int crypto_first_ahash_req(struct crypto_async_request *async,
+ struct mtk_crypto_ahash_req *mtk_req, uint8_t *Input_p,
+ unsigned int InputByteCount, bool finish);
+int crypto_ahash_aes_cbc(struct crypto_async_request *async,
+ struct mtk_crypto_ahash_req *mtk_req, uint8_t *Input_p,
+ unsigned int InputByteCount);
+bool crypto_hmac_precompute(SABuilder_Auth_t AuthAlgo,
+ uint8_t *AuthKey_p,
+ unsigned int AuthKeyByteCount,
+ uint8_t *Inner_p,
+ uint8_t *Outer_p);
+void crypto_free_sa(void *sa_pointer);
+void crypto_free_token(void *token);
+void crypto_free_pkt(void *pkt);
+void crypto_free_sglist(void *sglist);
int mtk_ddk_pec_init(void);
void mtk_ddk_pec_deinit(void);
diff --git a/feed/kernel/crypto-eip/src/inc/crypto-eip/lookaside.h b/feed/kernel/crypto-eip/src/inc/crypto-eip/lookaside.h
new file mode 100644
index 0000000..cc9bc29
--- /dev/null
+++ b/feed/kernel/crypto-eip/src/inc/crypto-eip/lookaside.h
@@ -0,0 +1,276 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2023 MediaTek Inc.
+ *
+ * Author: Chris.Chou <chris.chou@mediatek.com>
+ * Ren-Ting Wang <ren-ting.wang@mediatek.com>
+ */
+
+#ifndef _LOOKASIDE_H_
+#define _LOOKASIDE_H_
+
+#include <crypto/aes.h>
+#include <crypto/sha.h>
+#include <crypto/md5.h>
+#include <linux/io.h>
+#include <linux/list.h>
+#include <crypto/algapi.h>
+#include <crypto/skcipher.h>
+#include <crypto/aead.h>
+#include <crypto/hash.h>
+
+#include "crypto-eip197-inline-ddk.h"
+
+#define EIP197_DEFAULT_RING_SIZE 400
+#define MTK_CRYPTO_PRIORITY 1500
+#define EIP197_AEAD_TYPE_IPSEC_ESP 3
+#define EIP197_AEAD_TYPE_IPSEC_ESP_GMAC 4
+#define EIP197_AEAD_IPSEC_IV_SIZE 8
+#define EIP197_AEAD_IPSEC_CCM_NONCE_SIZE 3
+
+extern struct mtk_crypto_alg_template mtk_crypto_cbc_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_ofb_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_ecb_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_cfb_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_ctr_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_cbc_des;
+extern struct mtk_crypto_alg_template mtk_crypto_ecb_des;
+extern struct mtk_crypto_alg_template mtk_crypto_cbc_des3_ede;
+extern struct mtk_crypto_alg_template mtk_crypto_ecb_des3_ede;
+
+extern struct mtk_crypto_alg_template mtk_crypto_sha1;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha1;
+extern struct mtk_crypto_alg_template mtk_crypto_sha224;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha224;
+extern struct mtk_crypto_alg_template mtk_crypto_sha256;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha256;
+extern struct mtk_crypto_alg_template mtk_crypto_sha384;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha384;
+extern struct mtk_crypto_alg_template mtk_crypto_sha512;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha512;
+extern struct mtk_crypto_alg_template mtk_crypto_md5;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_md5;
+extern struct mtk_crypto_alg_template mtk_crypto_xcbcmac;
+extern struct mtk_crypto_alg_template mtk_crypto_cmac;
+
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha1_cbc_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha224_cbc_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha256_cbc_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha384_cbc_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha512_cbc_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_md5_cbc_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha1_cbc_des3_ede;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha224_cbc_des3_ede;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha256_cbc_des3_ede;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha384_cbc_des3_ede;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha512_cbc_des3_ede;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_md5_cbc_des3_ede;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha1_cbc_des;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha224_cbc_des;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha256_cbc_des;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha384_cbc_des;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha512_cbc_des;
+extern struct mtk_crypto_alg_template mtk_crypto_hmac_sha1_ctr_aes;
+extern struct mtk_crypto_alg_template mtk_crypto_gcm;
+extern struct mtk_crypto_alg_template mtk_crypto_rfc4106_gcm;
+extern struct mtk_crypto_alg_template mtk_crypto_rfc4543_gcm;
+extern struct mtk_crypto_alg_template mtk_crypto_rfc4309_ccm;
+
+// general
+struct mtk_crypto_work_data {
+ struct work_struct work;
+ struct mtk_crypto_priv *priv;
+};
+
+struct mtk_crypto_queue {
+ spinlock_t lock;
+
+ struct workqueue_struct *workqueue;
+ struct mtk_crypto_work_data work_data;
+
+ struct crypto_queue queue;
+ spinlock_t queue_lock;
+};
+
+struct mtk_crypto_priv {
+ struct mtk_crypto_queue mtk_eip_queue;
+};
+
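+/* Per-request DDK handles (SA, token, packet SG list) kept so the result
+ * handler can release them once the EIP engine has finished.
+ */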
+struct mtk_crypto_engine_data {
+ void *sa;
+ void *token;
+ void *token_context;
+ void *pkt_handle;
+};
+
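+/* Completed-packet bookkeeping: ties the engine handles and the destination
+ * SG list back to the originating crypto_async_request.
+ */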
+struct mtk_crypto_result {
+ struct list_head list;
+ void *dst;
+ struct mtk_crypto_engine_data eip;
+ struct crypto_async_request *async;
+ int size;
+};
+
+enum mtk_crypto_alg_type {
+ MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ MTK_CRYPTO_ALG_TYPE_AEAD,
+ MTK_CRYPTO_ALG_TYPE_AHASH,
+};
+
+struct mtk_crypto_alg_template {
+ struct mtk_crypto_priv *priv;
+ enum mtk_crypto_alg_type type;
+ union {
+ struct skcipher_alg skcipher;
+ struct aead_alg aead;
+ struct ahash_alg ahash;
+ } alg;
+};
+
+// cipher algos
+struct mtk_crypto_context {
+ int (*send)(struct crypto_async_request *req);
+ int (*handle_result)(struct mtk_crypto_result *req, int err);
+};
+
+enum mtk_crypto_cipher_direction {
+ MTK_CRYPTO_ENCRYPT,
+ MTK_CRYPTO_DECRYPT,
+};
+
+enum mtk_crypto_cipher_mode {
+ MTK_CRYPTO_MODE_CBC,
+ MTK_CRYPTO_MODE_ECB,
+ MTK_CRYPTO_MODE_OFB,
+ MTK_CRYPTO_MODE_CFB,
+ MTK_CRYPTO_MODE_CTR,
+ MTK_CRYPTO_MODE_GCM,
+ MTK_CRYPTO_MODE_GMAC,
+ MTK_CRYPTO_MODE_CCM,
+};
+
+enum mtk_crypto_alg {
+ MTK_CRYPTO_AES,
+ MTK_CRYPTO_DES,
+ MTK_CRYPTO_3DES,
+ MTK_CRYPTO_ALG_SHA1,
+ MTK_CRYPTO_ALG_SHA224,
+ MTK_CRYPTO_ALG_SHA256,
+ MTK_CRYPTO_ALG_SHA384,
+ MTK_CRYPTO_ALG_SHA512,
+ MTK_CRYPTO_ALG_MD5,
+ MTK_CRYPTO_ALG_GCM,
+ MTK_CRYPTO_ALG_GMAC,
+ MTK_CRYPTO_ALG_CCM,
+ MTK_CRYPTO_ALG_XCBC,
+ MTK_CRYPTO_ALG_CMAC_128,
+ MTK_CRYPTO_ALG_CMAC_192,
+ MTK_CRYPTO_ALG_CMAC_256,
+};
+
+struct mtk_crypto_cipher_req {
+ enum mtk_crypto_cipher_direction direction;
+ int nr_src;
+ int nr_dst;
+};
+
+struct mtk_crypto_cipher_ctx {
+ struct mtk_crypto_context base;
+ struct mtk_crypto_priv *priv;
+
+ u8 aead;
+
+ enum mtk_crypto_cipher_mode mode;
+ enum mtk_crypto_alg alg;
+ u8 blocksz;
+ __le32 key[16];
+ u32 nonce;
+ unsigned int key_len;
+
+ enum mtk_crypto_alg hash_alg;
+ u32 state_sz;
+ __be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
+ __be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
+
+ struct crypto_cipher *hkaes;
+ struct crypto_aead *fback;
+};
+
+enum mtk_crypto_ahash_digest {
+ MTK_CRYPTO_DIGEST_INITIAL,
+ MTK_CRYPTO_DIGEST_PRECOMPUTED,
+ MTK_CRYPTO_DIGEST_XCM,
+ MTK_CRYPTO_DIGEST_HMAC,
+};
+
+struct mtk_crypto_ahash_ctx {
+ struct mtk_crypto_context base;
+ struct mtk_crypto_priv *priv;
+
+ enum mtk_crypto_alg alg;
+ u8 key_sz;
+ bool cbcmac;
+
+ __le32 ipad[SHA512_BLOCK_SIZE / sizeof(__le32)];
+ __le32 opad[SHA512_BLOCK_SIZE / sizeof(__le32)];
+ __le32 zero_hmac[SHA512_BLOCK_SIZE / sizeof(__le32)];
+
+ void *ipad_sa;
+ void *ipad_token;
+ void *opad_sa;
+ void *opad_token;
+
+ struct crypto_cipher *kaes;
+ struct crypto_ahash *fback;
+ struct crypto_shash *shpre;
+ struct shash_desc *shdesc;
+};
+
+#define HASH_CACHE_SIZE SHA512_BLOCK_SIZE
+
+struct mtk_crypto_ahash_req {
+ bool last_req;
+ bool finish;
+ bool hmac;
+ bool hmac_zlen;
+ bool len_is_le;
+ bool not_first;
+ bool xcbcmac;
+
+ int nents;
+
+ u32 digest;
+
+ u8 state_sz;
+ u8 block_sz;
+ u8 digest_sz;
+ u32 state[SHA512_DIGEST_SIZE / sizeof(u32)];
+
+ u64 len;
+ u64 processed;
+
+ u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
+
+ void *sa_pointer;
+ void *token_context;
+ u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
+};
+
+struct mtk_crypto_ahash_export_state {
+ u64 len;
+ u64 processed;
+
+ u32 digest;
+
+ u32 state[SHA512_DIGEST_SIZE / sizeof(u32)];
+ u8 cache[HASH_CACHE_SIZE];
+ void *sa_pointer;
+ void *token_context;
+};
+
+void mtk_crypto_dequeue_work(struct work_struct *work);
+void mtk_crypto_dequeue(struct mtk_crypto_priv *priv);
+int mtk_crypto_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
+ void *istate, void *ostate);
+
+#endif /* _LOOKASIDE_H_ */
diff --git a/feed/kernel/crypto-eip/src/init.c b/feed/kernel/crypto-eip/src/init.c
index 075ca69..0fb430c 100644
--- a/feed/kernel/crypto-eip/src/init.c
+++ b/feed/kernel/crypto-eip/src/init.c
@@ -11,7 +11,11 @@
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_platform.h>
+#include <linux/crypto.h>
#include <linux/platform_device.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/internal/aead.h>
+#include <crypto/internal/hash.h>
#include <mtk_eth_soc.h>
#include <mtk_hnat/hnat.h>
@@ -20,6 +24,7 @@
#include "crypto-eip/crypto-eip.h"
#include "crypto-eip/ddk-wrapper.h"
+#include "crypto-eip/lookaside.h"
#include "crypto-eip/internal.h"
#define DRIVER_AUTHOR "Ren-Ting Wang <ren-ting.wang@mediatek.com, " \
@@ -27,6 +32,57 @@
struct mtk_crypto mcrypto;
struct device *crypto_dev;
+struct mtk_crypto_priv *priv;
+spinlock_t add_lock;
+
+static struct mtk_crypto_alg_template *mtk_crypto_algs[] = {
+ &mtk_crypto_cbc_aes,
+ &mtk_crypto_ecb_aes,
+ &mtk_crypto_cfb_aes,
+ &mtk_crypto_ofb_aes,
+ &mtk_crypto_ctr_aes,
+ &mtk_crypto_cbc_des,
+ &mtk_crypto_ecb_des,
+ &mtk_crypto_cbc_des3_ede,
+ &mtk_crypto_ecb_des3_ede,
+ &mtk_crypto_sha1,
+ &mtk_crypto_hmac_sha1,
+ &mtk_crypto_sha224,
+ &mtk_crypto_hmac_sha224,
+ &mtk_crypto_sha256,
+ &mtk_crypto_hmac_sha256,
+ &mtk_crypto_sha384,
+ &mtk_crypto_hmac_sha384,
+ &mtk_crypto_sha512,
+ &mtk_crypto_hmac_sha512,
+ &mtk_crypto_md5,
+ &mtk_crypto_hmac_md5,
+ &mtk_crypto_xcbcmac,
+ &mtk_crypto_cmac,
+ &mtk_crypto_hmac_sha1_cbc_aes,
+ &mtk_crypto_hmac_sha224_cbc_aes,
+ &mtk_crypto_hmac_sha256_cbc_aes,
+ &mtk_crypto_hmac_sha384_cbc_aes,
+ &mtk_crypto_hmac_sha512_cbc_aes,
+ &mtk_crypto_hmac_md5_cbc_aes,
+ &mtk_crypto_hmac_sha1_cbc_des3_ede,
+ &mtk_crypto_hmac_sha224_cbc_des3_ede,
+ &mtk_crypto_hmac_sha256_cbc_des3_ede,
+ &mtk_crypto_hmac_sha384_cbc_des3_ede,
+ &mtk_crypto_hmac_sha512_cbc_des3_ede,
+ &mtk_crypto_hmac_md5_cbc_des3_ede,
+ &mtk_crypto_hmac_sha1_cbc_des,
+ &mtk_crypto_hmac_sha224_cbc_des,
+ &mtk_crypto_hmac_sha256_cbc_des,
+ &mtk_crypto_hmac_sha384_cbc_des,
+ &mtk_crypto_hmac_sha512_cbc_des,
+ //&mtk_crypto_hmac_sha1_ctr_aes, /* no testcase, todo */
+ //&mtk_crypto_hmac_sha256_ctr_aes, /* no testcase, todo */
+ &mtk_crypto_gcm,
+ &mtk_crypto_rfc4106_gcm,
+ &mtk_crypto_rfc4543_gcm,
+ &mtk_crypto_rfc4309_ccm,
+};
inline void crypto_eth_write(u32 reg, u32 val)
{
@@ -79,6 +135,55 @@
.xdo_dev_policy_add = mtk_xfrm_offload_policy_add,
};
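+
+/* Register every lookaside algorithm template with the Linux crypto
+ * subsystem; on failure, roll back the templates registered so far.
+ */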
+static int mtk_crypto_register_algorithms(struct mtk_crypto_priv *priv)
+{
+ int i;
+ int j;
+ int ret;
+
+ for (i = 0; i < ARRAY_SIZE(mtk_crypto_algs); i++) {
+ mtk_crypto_algs[i]->priv = priv;
+
+ if (mtk_crypto_algs[i]->type == MTK_CRYPTO_ALG_TYPE_SKCIPHER)
+ ret = crypto_register_skcipher(&mtk_crypto_algs[i]->alg.skcipher);
+ else if (mtk_crypto_algs[i]->type == MTK_CRYPTO_ALG_TYPE_AEAD)
+ ret = crypto_register_aead(&mtk_crypto_algs[i]->alg.aead);
+ else
+ ret = crypto_register_ahash(&mtk_crypto_algs[i]->alg.ahash);
+
+ if (ret)
+ goto fail;
+ }
+
+ return 0;
+
+fail:
+ for (j = 0; j < i; j++) {
+ if (mtk_crypto_algs[j]->type == MTK_CRYPTO_ALG_TYPE_SKCIPHER)
+ crypto_unregister_skcipher(&mtk_crypto_algs[j]->alg.skcipher);
+ else if (mtk_crypto_algs[j]->type == MTK_CRYPTO_ALG_TYPE_AEAD)
+ crypto_unregister_aead(&mtk_crypto_algs[j]->alg.aead);
+ else
+ crypto_unregister_ahash(&mtk_crypto_algs[j]->alg.ahash);
+ }
+
+ return ret;
+}
+
+static void mtk_crypto_unregister_algorithms(void)
+{
+ int i;
+
+ for (i = 0; i < ARRAY_SIZE(mtk_crypto_algs); i++) {
+ if (mtk_crypto_algs[i]->type == MTK_CRYPTO_ALG_TYPE_SKCIPHER)
+ crypto_unregister_skcipher(&mtk_crypto_algs[i]->alg.skcipher);
+ else if (mtk_crypto_algs[i]->type == MTK_CRYPTO_ALG_TYPE_AEAD)
+ crypto_unregister_aead(&mtk_crypto_algs[i]->alg.aead);
+ else
+ crypto_unregister_ahash(&mtk_crypto_algs[i]->alg.ahash);
+ }
+}
+
static void mtk_crypto_xfrm_offload_deinit(struct mtk_eth *eth)
{
int i;
@@ -179,6 +284,32 @@
return 0;
}
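+
+/* Allocate the lookaside private data, create the single-threaded dequeue
+ * workqueue and initialize the request queue and its locks.
+ */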
+static int __init mtk_crypto_lookaside_data_init(struct platform_device *pdev)
+{
+ struct device *dev = &pdev->dev;
+
+ priv = devm_kzalloc(dev, sizeof(*priv), GFP_KERNEL);
+ if (!priv)
+ return -ENOMEM;
+
+ platform_set_drvdata(pdev, priv);
+
+ priv->mtk_eip_queue.work_data.priv = priv;
+ INIT_WORK(&priv->mtk_eip_queue.work_data.work, mtk_crypto_dequeue_work);
+
+ priv->mtk_eip_queue.workqueue = create_singlethread_workqueue("mtk_crypto_work");
+ if (!priv->mtk_eip_queue.workqueue)
+ return -ENOMEM;
+
+ crypto_init_queue(&priv->mtk_eip_queue.queue, EIP197_DEFAULT_RING_SIZE);
+
+ spin_lock_init(&priv->mtk_eip_queue.lock);
+ spin_lock_init(&priv->mtk_eip_queue.queue_lock);
+ spin_lock_init(&add_lock);
+
+ return 0;
+}
+
static int __init mtk_crypto_eip_dts_init(void)
{
struct platform_device *crypto_pdev;
@@ -224,6 +355,10 @@
crypto_dev = &crypto_pdev->dev;
+ ret = mtk_crypto_lookaside_data_init(crypto_pdev);
+ if (ret)
+ goto out;
+
out:
of_node_put(crypto_node);
@@ -269,6 +404,7 @@
}
mtk_crypto_xfrm_offload_init(mcrypto.eth);
+ mtk_crypto_register_algorithms(priv);
CRYPTO_INFO("crypto-eip init done\n");
@@ -278,10 +414,11 @@
static void __exit mtk_crypto_eip_exit(void)
{
/* TODO: deactivate all tunnel */
-
+ mtk_crypto_unregister_algorithms();
mtk_crypto_xfrm_offload_deinit(mcrypto.eth);
mtk_crypto_eip_hw_deinit();
+
}
module_init(mtk_crypto_eip_init);
diff --git a/feed/kernel/crypto-eip/src/lookaside-cipher.c b/feed/kernel/crypto-eip/src/lookaside-cipher.c
new file mode 100644
index 0000000..3069dfc
--- /dev/null
+++ b/feed/kernel/crypto-eip/src/lookaside-cipher.c
@@ -0,0 +1,1753 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * Copyright (C) 2023 MediaTek Inc.
+ *
+ * Author: Chris.Chou <chris.chou@mediatek.com>
+ * Ren-Ting Wang <ren-ting.wang@mediatek.com>
+ */
+
+#include <linux/bitops.h>
+#include <crypto/aes.h>
+#include <crypto/authenc.h>
+#include <crypto/gcm.h>
+#include <crypto/ghash.h>
+#include <crypto/ctr.h>
+#include <crypto/xts.h>
+#include <crypto/internal/des.h>
+#include <crypto/skcipher.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/aead.h>
+#include <crypto/internal/aead.h>
+
+#include "crypto-eip/crypto-eip.h"
+#include "crypto-eip/ddk-wrapper.h"
+#include "crypto-eip/lookaside.h"
+#include "crypto-eip/internal.h"
+
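+/* Dequeue-worker callback for skciphers: submit the request through the DDK
+ * basic cipher path and complete it immediately if submission fails.
+ */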
+static int mtk_crypto_skcipher_send(struct crypto_async_request *async)
+{
+ struct skcipher_request *req = skcipher_request_cast(async);
+ struct mtk_crypto_cipher_req *mtk_req = skcipher_request_ctx(req);
+ struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+ int ret = 0;
+
+ ret = crypto_basic_cipher(async, mtk_req, req->src, req->dst, req->cryptlen,
+ 0, 0, req->iv, skcipher->ivsize);
+
+ if (ret != 0)
+ async->complete(async, ret);
+
+ return ret;
+}
+
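+/* Result callback for skciphers: save the next chaining IV for CBC
+ * encryption, unmap the DMA scatterlists, complete the request and release
+ * the DDK resources (SA, token, SG lists).
+ */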
+static int mtk_crypto_skcipher_handle_result(struct mtk_crypto_result *res, int err)
+{
+ struct crypto_async_request *async = res->async;
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(async->tfm);
+ struct skcipher_request *req = skcipher_request_cast(async);
+ struct mtk_crypto_cipher_req *mtk_req = skcipher_request_ctx(req);
+ struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+
+ if (ctx->mode == MTK_CRYPTO_MODE_CBC && mtk_req->direction == MTK_CRYPTO_ENCRYPT)
+ sg_pcopy_to_buffer(req->dst, mtk_req->nr_dst, req->iv, skcipher->ivsize,
+ req->cryptlen - skcipher->ivsize);
+
+ if (req->src == req->dst) {
+ dma_unmap_sg(crypto_dev, req->src, mtk_req->nr_src, DMA_BIDIRECTIONAL);
+ } else {
+ dma_unmap_sg(crypto_dev, req->src, mtk_req->nr_src, DMA_TO_DEVICE);
+ dma_unmap_sg(crypto_dev, req->dst, mtk_req->nr_dst, DMA_FROM_DEVICE);
+ }
+
+ async->complete(async, err);
+
+ crypto_free_sglist(res->eip.pkt_handle);
+ crypto_free_sglist(res->dst);
+ crypto_free_sa(res->eip.sa);
+ crypto_free_token(res->eip.token);
+ kfree(res->eip.token_context);
+
+ return 0;
+}
+
+static int mtk_crypto_aead_send(struct crypto_async_request *async)
+{
+ struct aead_request *req = aead_request_cast(async);
+ struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+ struct mtk_crypto_cipher_req *mtk_req = aead_request_ctx(req);
+ int ret;
+
+ ret = crypto_aead_cipher(async, mtk_req, req->src, req->dst, req->cryptlen,
+ req->assoclen, crypto_aead_authsize(tfm), req->iv,
+ crypto_aead_ivsize(tfm));
+
+ if (ret != 0)
+ async->complete(async, ret);
+
+ return ret;
+}
+
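+/* Result callback for AEADs: re-align the CCM decryption output in the
+ * destination buffer, unmap the DMA scatterlists, release the DDK resources
+ * and complete the request.
+ */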
+static int mtk_crypto_aead_handle_result(struct mtk_crypto_result *res, int err)
+{
+ struct crypto_async_request *async = res->async;
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(async->tfm);
+ struct aead_request *req = aead_request_cast(async);
+ struct mtk_crypto_cipher_req *mtk_req = aead_request_ctx(req);
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ int pkt_size;
+ uint8_t *temp;
+
+ if (ctx->mode == MTK_CRYPTO_MODE_CCM && mtk_req->direction == MTK_CRYPTO_DECRYPT) {
+ if (mtk_req->direction == MTK_CRYPTO_ENCRYPT)
+ pkt_size = req->cryptlen + req->assoclen + crypto_aead_authsize(aead);
+ else
+ pkt_size = req->cryptlen + req->assoclen - crypto_aead_authsize(aead);
+
+ /* Re-align the engine output in the destination buffer (8-byte shift). */
+ temp = kzalloc(pkt_size, GFP_KERNEL);
+ if (temp) {
+ sg_copy_to_buffer(req->dst, mtk_req->nr_dst, temp + 8, pkt_size - 8);
+ sg_copy_from_buffer(req->dst, mtk_req->nr_dst, temp, pkt_size);
+ kfree(temp);
+ }
+ }
+
+ if (req->src == req->dst) {
+ dma_unmap_sg(crypto_dev, req->src, mtk_req->nr_src, DMA_BIDIRECTIONAL);
+ } else {
+ dma_unmap_sg(crypto_dev, req->src, mtk_req->nr_src, DMA_TO_DEVICE);
+ dma_unmap_sg(crypto_dev, req->dst, mtk_req->nr_dst, DMA_FROM_DEVICE);
+ }
+
+ crypto_free_sglist(res->eip.pkt_handle);
+ crypto_free_sglist(res->dst);
+ crypto_free_sa(res->eip.sa);
+ crypto_free_token(res->eip.token);
+ kfree(res->eip.token_context);
+
+ async->complete(async, err);
+ return 0;
+}
+
+static int mtk_crypto_skcipher_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct mtk_crypto_alg_template *tmpl =
+ container_of(tfm->__crt_alg, struct mtk_crypto_alg_template,
+ alg.skcipher.base);
+
+ crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
+ sizeof(struct mtk_crypto_cipher_req));
+ ctx->priv = tmpl->priv;
+
+ ctx->base.send = mtk_crypto_skcipher_send;
+ ctx->base.handle_result = mtk_crypto_skcipher_handle_result;
+ return 0;
+}
+
+static int mtk_crypto_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_skcipher_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
+ ctx->mode = MTK_CRYPTO_MODE_CBC;
+ return 0;
+}
+
+static int mtk_crypto_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_skcipher_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_AES;
+ ctx->mode = MTK_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ return 0;
+}
+
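+/* Common enqueue path: record the direction, add the request to the
+ * lookaside crypto queue and kick the dequeue worker.
+ */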
+static int mtk_crypto_queue_req(struct crypto_async_request *base,
+ struct mtk_crypto_cipher_req *mtk_req,
+ enum mtk_crypto_cipher_direction dir)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
+ struct mtk_crypto_priv *priv = ctx->priv;
+ int ret;
+
+ mtk_req->direction = dir;
+
+ spin_lock_bh(&priv->mtk_eip_queue.queue_lock);
+ ret = crypto_enqueue_request(&priv->mtk_eip_queue.queue, base);
+ spin_unlock_bh(&priv->mtk_eip_queue.queue_lock);
+
+ queue_work(priv->mtk_eip_queue.workqueue,
+ &priv->mtk_eip_queue.work_data.work);
+
+ return ret;
+}
+
+static int mtk_crypto_decrypt(struct skcipher_request *req)
+{
+ return mtk_crypto_queue_req(&req->base, skcipher_request_ctx(req), MTK_CRYPTO_DECRYPT);
+}
+
+static int mtk_crypto_encrypt(struct skcipher_request *req)
+{
+ return mtk_crypto_queue_req(&req->base, skcipher_request_ctx(req), MTK_CRYPTO_ENCRYPT);
+}
+
+static int mtk_crypto_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
+ const u8 *key, unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct crypto_aes_ctx aes;
+ int ret;
+ int i;
+
+ ret = aes_expandkey(&aes, key, len);
+ if (ret)
+ return ret;
+
+ for (i = 0; i < len / sizeof(u32); i++)
+ ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
+
+ ctx->key_len = len;
+
+ memzero_explicit(&aes, sizeof(aes));
+ return 0;
+}
+
+static void mtk_crypto_skcipher_cra_exit(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ memzero_explicit(ctx->key, sizeof(ctx->key));
+}
+
+struct mtk_crypto_alg_template mtk_crypto_cbc_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = mtk_crypto_skcipher_aes_setkey,
+ .encrypt = mtk_crypto_encrypt,
+ .decrypt = mtk_crypto_decrypt,
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .ivsize = AES_BLOCK_SIZE,
+ .base = {
+ .cra_name = "cbc(aes)",
+ .cra_driver_name = "crypto-eip-cbc-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_init = mtk_crypto_skcipher_aes_cbc_cra_init,
+ .cra_exit = mtk_crypto_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_skcipher_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
+ ctx->mode = MTK_CRYPTO_MODE_CFB;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_cfb_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = mtk_crypto_skcipher_aes_setkey,
+ .encrypt = mtk_crypto_encrypt,
+ .decrypt = mtk_crypto_decrypt,
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .ivsize = AES_BLOCK_SIZE,
+ .base = {
+ .cra_name = "cfb(aes)",
+ .cra_driver_name = "crypto-eip-cfb-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_skcipher_aes_cfb_cra_init,
+ .cra_exit = mtk_crypto_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_skcipher_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
+ ctx->mode = MTK_CRYPTO_MODE_OFB;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_ofb_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = mtk_crypto_skcipher_aes_setkey,
+ .encrypt = mtk_crypto_encrypt,
+ .decrypt = mtk_crypto_decrypt,
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .ivsize = AES_BLOCK_SIZE,
+ .base = {
+ .cra_name = "ofb(aes)",
+ .cra_driver_name = "crypto-eip-ofb-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_skcipher_aes_ofb_cra_init,
+ .cra_exit = mtk_crypto_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+struct mtk_crypto_alg_template mtk_crypto_ecb_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = mtk_crypto_skcipher_aes_setkey,
+ .encrypt = mtk_crypto_encrypt,
+ .decrypt = mtk_crypto_decrypt,
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .base = {
+ .cra_name = "ecb(aes)",
+ .cra_driver_name = "crypto-eip-ecb-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_init = mtk_crypto_skcipher_aes_ecb_cra_init,
+ .cra_exit = mtk_crypto_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
+ const u8 *key, unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct crypto_aes_ctx aes;
+ int ret;
+ int i;
+ unsigned int keylen;
+
+ ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
+ keylen = len - CTR_RFC3686_NONCE_SIZE;
+ ret = aes_expandkey(&aes, key, keylen);
+ if (ret)
+ return ret;
+
+ for (i = 0; i < keylen / sizeof(u32); i++)
+ ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
+
+ ctx->key_len = keylen;
+
+ memzero_explicit(&aes, sizeof(aes));
+ return 0;
+}
+
+static int mtk_crypto_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_skcipher_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
+ ctx->mode = MTK_CRYPTO_MODE_CTR;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_ctr_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = mtk_crypto_skcipher_aesctr_setkey,
+ .encrypt = mtk_crypto_encrypt,
+ .decrypt = mtk_crypto_decrypt,
+ .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .base = {
+ .cra_name = "rfc3686(ctr(aes))",
+ .cra_driver_name = "crypto-eip-ctr-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_skcipher_aes_ctr_cra_init,
+ .cra_exit = mtk_crypto_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
+ unsigned int len)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
+ int ret;
+
+ ret = verify_skcipher_des_key(ctfm, key);
+ if (ret)
+ return ret;
+ memcpy(ctx->key, key, len);
+ ctx->key_len = len;
+
+ return 0;
+}
+
+static int mtk_crypto_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_skcipher_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_DES;
+ ctx->blocksz = DES_BLOCK_SIZE;
+ ctx->mode = MTK_CRYPTO_MODE_CBC;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_cbc_des = {
+ .type = MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = mtk_crypto_des_setkey,
+ .encrypt = mtk_crypto_encrypt,
+ .decrypt = mtk_crypto_decrypt,
+ .min_keysize = DES_KEY_SIZE,
+ .max_keysize = DES_KEY_SIZE,
+ .ivsize = DES_BLOCK_SIZE,
+ .base = {
+ .cra_name = "cbc(des)",
+ .cra_driver_name = "crypto-eip-cbc-des",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_skcipher_des_cbc_cra_init,
+ .cra_exit = mtk_crypto_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_skcipher_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_DES;
+ ctx->mode = MTK_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_ecb_des = {
+ .type = MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = mtk_crypto_des_setkey,
+ .encrypt = mtk_crypto_encrypt,
+ .decrypt = mtk_crypto_decrypt,
+ .min_keysize = DES_KEY_SIZE,
+ .max_keysize = DES_KEY_SIZE,
+ .base = {
+ .cra_name = "ecb(des)",
+ .cra_driver_name = "crypto-eip-ecb-des",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_skcipher_des_ecb_cra_init,
+ .cra_exit = mtk_crypto_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_des3_ede_setkey(struct crypto_skcipher *ctfm,
+ const u8 *key, unsigned int len)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
+ int err;
+
+ err = verify_skcipher_des3_key(ctfm, key);
+ if (err)
+ return err;
+
+ memcpy(ctx->key, key, len);
+ ctx->key_len = len;
+ return 0;
+}
+
+static int mtk_crypto_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_skcipher_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_3DES;
+ ctx->mode = MTK_CRYPTO_MODE_CBC;
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_cbc_des3_ede = {
+ .type = MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = mtk_crypto_des3_ede_setkey,
+ .encrypt = mtk_crypto_encrypt,
+ .decrypt = mtk_crypto_decrypt,
+ .min_keysize = DES3_EDE_KEY_SIZE,
+ .max_keysize = DES3_EDE_KEY_SIZE,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .base = {
+ .cra_name = "cbc(des3_ede)",
+ .cra_driver_name = "crypto-eip-cbc-des3_ede",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_skcipher_des3_cbc_cra_init,
+ .cra_exit = mtk_crypto_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_skcipher_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_3DES;
+ ctx->mode = MTK_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_ecb_des3_ede = {
+ .type = MTK_CRYPTO_ALG_TYPE_SKCIPHER,
+ .alg.skcipher = {
+ .setkey = mtk_crypto_des3_ede_setkey,
+ .encrypt = mtk_crypto_encrypt,
+ .decrypt = mtk_crypto_decrypt,
+ .min_keysize = DES3_EDE_KEY_SIZE,
+ .max_keysize = DES3_EDE_KEY_SIZE,
+ .base = {
+ .cra_name = "ecb(des3_ede)",
+ .cra_driver_name = "crypto-eip-ecb-des3_ede",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_skcipher_des3_ecb_cra_init,
+ .cra_exit = mtk_crypto_skcipher_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_encrypt(struct aead_request *req)
+{
+ struct mtk_crypto_cipher_req *creq = aead_request_ctx(req);
+
+ return mtk_crypto_queue_req(&req->base, creq, MTK_CRYPTO_ENCRYPT);
+}
+
+static int mtk_crypto_aead_decrypt(struct aead_request *req)
+{
+ struct mtk_crypto_cipher_req *creq = aead_request_ctx(req);
+
+ return mtk_crypto_queue_req(&req->base, creq, MTK_CRYPTO_DECRYPT);
+}
+
+static void mtk_crypto_aead_cra_exit(struct crypto_tfm *tfm)
+{
+ mtk_crypto_skcipher_cra_exit(tfm);
+}
+
+static int mtk_crypto_aead_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct mtk_crypto_alg_template *tmpl =
+ container_of(tfm->__crt_alg, struct mtk_crypto_alg_template, alg.aead.base);
+
+ crypto_aead_set_reqsize(__crypto_aead_cast(tfm), sizeof(struct mtk_crypto_cipher_req));
+
+ ctx->priv = tmpl->priv;
+
+ ctx->alg = MTK_CRYPTO_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
+ ctx->mode = MTK_CRYPTO_MODE_CBC;
+ ctx->aead = true;
+ ctx->base.send = mtk_crypto_aead_send;
+ ctx->base.handle_result = mtk_crypto_aead_handle_result;
+ return 0;
+}
+
+static int mtk_crypto_aead_sha1_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_cra_init(tfm);
+ ctx->hash_alg = MTK_CRYPTO_ALG_SHA1;
+ ctx->state_sz = SHA1_DIGEST_SIZE;
+ return 0;
+}
+
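+/* authenc() setkey: split the combined key, validate the cipher part per
+ * algorithm, precompute the HMAC ipad/opad digests through the EIP hash
+ * path and cache them in the transform context.
+ */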
+static int mtk_crypto_aead_setkey(struct crypto_aead *ctfm, const u8 *key, unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct mtk_crypto_ahash_export_state istate, ostate;
+ struct crypto_authenc_keys keys;
+ struct crypto_aes_ctx aes;
+ int err = -EINVAL, i;
+
+ if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
+ goto badkey;
+
+ if (ctx->mode == MTK_CRYPTO_MODE_CTR) {
+ if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
+ goto badkey;
+
+ ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen - CTR_RFC3686_NONCE_SIZE);
+ keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
+ }
+
+ switch (ctx->alg) {
+ case MTK_CRYPTO_AES:
+ err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
+ if (unlikely(err))
+ goto badkey;
+ break;
+ case MTK_CRYPTO_DES:
+ err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
+ if (unlikely(err))
+ goto badkey;
+ break;
+ case MTK_CRYPTO_3DES:
+ err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
+ if (unlikely(err))
+ goto badkey;
+ break;
+ default:
+ CRYPTO_ERR("aead: unsupported cipher algorithm\n");
+ goto badkey;
+ }
+
+ switch (ctx->hash_alg) {
+ case MTK_CRYPTO_ALG_SHA1:
+ err = mtk_crypto_hmac_setkey("crypto-eip-sha1", keys.authkey,
+ keys.authkeylen, &istate, &ostate);
+ if (err)
+ goto badkey;
+ break;
+ case MTK_CRYPTO_ALG_SHA224:
+ err = mtk_crypto_hmac_setkey("crypto-eip-sha224", keys.authkey,
+ keys.authkeylen, &istate, &ostate);
+ if (err)
+ goto badkey;
+ break;
+ case MTK_CRYPTO_ALG_SHA256:
+ err = mtk_crypto_hmac_setkey("crypto-eip-sha256", keys.authkey,
+ keys.authkeylen, &istate, &ostate);
+ if (err)
+ goto badkey;
+ break;
+ case MTK_CRYPTO_ALG_SHA384:
+ err = mtk_crypto_hmac_setkey("crypto-eip-sha384", keys.authkey,
+ keys.authkeylen, &istate, &ostate);
+ if (err)
+ goto badkey;
+ break;
+ case MTK_CRYPTO_ALG_SHA512:
+ err = mtk_crypto_hmac_setkey("crypto-eip-sha512", keys.authkey,
+ keys.authkeylen, &istate, &ostate);
+ if (err)
+ goto badkey;
+ break;
+ case MTK_CRYPTO_ALG_MD5:
+ err = mtk_crypto_hmac_setkey("crypto-eip-md5", keys.authkey,
+ keys.authkeylen, &istate, &ostate);
+ if (err)
+ goto badkey;
+ break;
+ default:
+ CRYPTO_ERR("aead: unsupported hash algorithm\n");
+ err = -EINVAL;
+ goto badkey;
+ }
+
+ for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
+ ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
+ ctx->key_len = keys.enckeylen;
+
+ memcpy(ctx->ipad, &istate.state, ctx->state_sz);
+ memcpy(ctx->opad, &ostate.state, ctx->state_sz);
+
+ if (istate.sa_pointer)
+ crypto_free_sa(istate.sa_pointer);
+ kfree(istate.token_context);
+ if (ostate.sa_pointer)
+ crypto_free_sa(ostate.sa_pointer);
+ kfree(ostate.token_context);
+
+ memzero_explicit(&keys, sizeof(keys));
+ return 0;
+
+badkey:
+ memzero_explicit(&keys, sizeof(keys));
+ return err;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha1_cbc_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = AES_BLOCK_SIZE,
+ .maxauthsize = SHA1_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha1),cbc(aes))",
+ .cra_driver_name = "crypto-eip-hmac-sha1-cbc-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha1_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha224_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_cra_init(tfm);
+ ctx->hash_alg = MTK_CRYPTO_ALG_SHA224;
+ ctx->state_sz = SHA256_DIGEST_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha224_cbc_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = AES_BLOCK_SIZE,
+ .maxauthsize = SHA224_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha224),cbc(aes))",
+ .cra_driver_name = "crypto-eip-hmac-sha224-cbc-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha224_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha256_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_cra_init(tfm);
+ ctx->hash_alg = MTK_CRYPTO_ALG_SHA256;
+ ctx->state_sz = SHA256_DIGEST_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha256_cbc_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = AES_BLOCK_SIZE,
+ .maxauthsize = SHA256_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha256),cbc(aes))",
+ .cra_driver_name = "crypto-eip-hmac-sha256-cbc-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha256_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha384_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_cra_init(tfm);
+ ctx->hash_alg = MTK_CRYPTO_ALG_SHA384;
+ ctx->state_sz = SHA512_DIGEST_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha384_cbc_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = AES_BLOCK_SIZE,
+ .maxauthsize = SHA384_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha384),cbc(aes))",
+ .cra_driver_name = "crypto-eip-hmac-sha384-cbc-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha384_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha512_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_cra_init(tfm);
+ ctx->hash_alg = MTK_CRYPTO_ALG_SHA512;
+ ctx->state_sz = SHA512_DIGEST_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha512_cbc_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = AES_BLOCK_SIZE,
+ .maxauthsize = SHA512_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha512),cbc(aes))",
+ .cra_driver_name = "crypto-eip-hmac-sha512-cbc-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha512_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_md5_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_cra_init(tfm);
+ ctx->hash_alg = MTK_CRYPTO_ALG_MD5;
+ ctx->state_sz = MD5_DIGEST_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_md5_cbc_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = AES_BLOCK_SIZE,
+ .maxauthsize = MD5_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(md5),cbc(aes))",
+ .cra_driver_name = "crypto-eip-hmac-md5-cbc-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_md5_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha1_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_3DES;
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha1_cbc_des3_ede = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA1_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
+ .cra_driver_name = "crypto-eip-hmac-sha1-cbc-des3_ede",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha1_des3_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha224_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_3DES;
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha224_cbc_des3_ede = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA224_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
+ .cra_driver_name = "crypto-eip-hmac-sha224-cbc-des3_ede",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha224_des3_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha256_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_3DES;
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha256_cbc_des3_ede = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA256_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
+ .cra_driver_name = "crypto-eip-hmac-sha256-cbc-des3_ede",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha256_des3_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha384_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_3DES;
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha384_cbc_des3_ede = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA384_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
+ .cra_driver_name = "crypto-eip-hmac-sha384-cbc-des3_ede",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha384_des3_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha512_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_3DES;
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha512_cbc_des3_ede = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA512_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
+ .cra_driver_name = "crypto-eip-hmac-sha512-cbc-des3_ede",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha512_des3_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_md5_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_md5_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_3DES;
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_md5_cbc_des3_ede = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = MD5_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
+ .cra_driver_name = "crypto-eip-hmac-md5-cbc-des3_ede",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_md5_des3_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha1_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_DES;
+ ctx->blocksz = DES_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha1_cbc_des = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA1_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha1),cbc(des))",
+ .cra_driver_name = "crypto-eip-hmac-sha1-cbc-des",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha1_des_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha224_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_DES;
+ ctx->blocksz = DES_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha224_cbc_des = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA224_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha224),cbc(des))",
+ .cra_driver_name = "crypto-eip-hmac-sha224-cbc-des",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha224_des_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha256_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_DES;
+ ctx->blocksz = DES_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha256_cbc_des = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA256_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha256),cbc(des))",
+ .cra_driver_name = "crypto-eip-hmac-sha256-cbc-des",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha256_des_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha384_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_DES;
+ ctx->blocksz = DES_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha384_cbc_des = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA384_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha384),cbc(des))",
+ .cra_driver_name = "crypto-eip-hmac-sha384-cbc-des",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha384_des_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha512_cra_init(tfm);
+ ctx->alg = MTK_CRYPTO_DES;
+ ctx->blocksz = DES_BLOCK_SIZE;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha512_cbc_des = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = DES_BLOCK_SIZE,
+ .maxauthsize = SHA512_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha512),cbc(des))",
+ .cra_driver_name = "crypto-eip-hmac-sha512-cbc-des",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha512_des_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha1_cra_init(tfm);
+ ctx->mode = MTK_CRYPTO_MODE_CTR;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha1_ctr_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .maxauthsize = SHA1_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
+ .cra_driver_name = "crypto-eip-hmac-sha1-ctr-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha1_ctr_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha256_cra_init(tfm);
+ ctx->mode = MTK_CRYPTO_MODE_CTR;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha256_ctr_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .maxauthsize = SHA256_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
+ .cra_driver_name = "crypto-eip-hmac-sha256-ctr-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha256_ctr_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha224_cra_init(tfm);
+ ctx->mode = MTK_CRYPTO_MODE_CTR;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha224_ctr_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .maxauthsize = SHA224_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
+ .cra_driver_name = "crypto-eip-hmac-sha224-ctr-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha224_ctr_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_sha512_cra_init(tfm);
+ ctx->mode = MTK_CRYPTO_MODE_CTR;
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha512_ctr_aes = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_setkey,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .maxauthsize = SHA512_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
+ .cra_driver_name = "crypto-eip-hmac-sha512-ctr-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_sha512_ctr_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
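+/* GCM setkey: expand the AES key and derive the GHASH subkey H by
+ * encrypting an all-zero block with a software AES cipher; H is stored in
+ * the ipad area of the context.
+ */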
+static int mtk_crypto_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
+ unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct crypto_aes_ctx aes;
+ u32 hashkey[AES_BLOCK_SIZE >> 2];
+ int ret, i;
+
+ ret = aes_expandkey(&aes, key, len);
+ if (ret) {
+ memzero_explicit(&aes, sizeof(aes));
+ return ret;
+ }
+
+ for (i = 0; i < len / sizeof(u32); i++)
+ ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
+
+ ctx->key_len = len;
+
+ /* Compute hash key by encrypting zeros with cipher key */
+ crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
+ crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
+ CRYPTO_TFM_REQ_MASK);
+ ret = crypto_cipher_setkey(ctx->hkaes, key, len);
+ if (ret) {
+ memzero_explicit(&aes, sizeof(aes));
+ return ret;
+ }
+
+ memset(hashkey, 0, AES_BLOCK_SIZE);
+ crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
+
+ for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
+ ctx->ipad[i] = cpu_to_be32(hashkey[i]);
+
+ memzero_explicit(hashkey, AES_BLOCK_SIZE);
+ memzero_explicit(&aes, sizeof(aes));
+ return 0;
+}
+
+static int mtk_crypto_aead_gcm_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_cra_init(tfm);
+ ctx->hash_alg = MTK_CRYPTO_ALG_GCM;
+ ctx->state_sz = GHASH_BLOCK_SIZE;
+ ctx->mode = MTK_CRYPTO_MODE_GCM;
+
+ ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
+ return PTR_ERR_OR_ZERO(ctx->hkaes);
+}
+
+static void mtk_crypto_aead_gcm_cra_exit(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_cipher(ctx->hkaes);
+ mtk_crypto_aead_cra_exit(tfm);
+}
+
+static int mtk_crypto_aead_gcm_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ return crypto_gcm_check_authsize(authsize);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_gcm = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_aead_gcm_setkey,
+ .setauthsize = mtk_crypto_aead_gcm_setauthsize,
+ .encrypt = mtk_crypto_aead_encrypt,
+ .decrypt = mtk_crypto_aead_decrypt,
+ .ivsize = GCM_AES_IV_SIZE,
+ .maxauthsize = GHASH_DIGEST_SIZE,
+ .base = {
+ .cra_name = "gcm(aes)",
+ .cra_driver_name = "crypto-eip-gcm-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_aead_gcm_cra_init,
+ .cra_exit = mtk_crypto_aead_gcm_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
+ unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ /* last 4 bytes of key are the nonce! */
+ ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
+
+ len -= CTR_RFC3686_NONCE_SIZE;
+ return mtk_crypto_aead_gcm_setkey(ctfm, key, len);
+}
+
+static int mtk_crypto_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ return crypto_rfc4106_check_authsize(authsize);
+}
+
+static int mtk_crypto_rfc4106_encrypt(struct aead_request *req)
+{
+ return crypto_ipsec_check_assoclen(req->assoclen) ?:
+ mtk_crypto_aead_encrypt(req);
+}
+
+static int mtk_crypto_rfc4106_decrypt(struct aead_request *req)
+{
+ return crypto_ipsec_check_assoclen(req->assoclen) ?:
+ mtk_crypto_aead_decrypt(req);
+}
+
+static int mtk_crypto_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = mtk_crypto_aead_gcm_cra_init(tfm);
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
+ return ret;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_rfc4106_gcm = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_rfc4106_gcm_setkey,
+ .setauthsize = mtk_crypto_rfc4106_gcm_setauthsize,
+ .encrypt = mtk_crypto_rfc4106_encrypt,
+ .decrypt = mtk_crypto_rfc4106_decrypt,
+ .ivsize = GCM_RFC4106_IV_SIZE,
+ .maxauthsize = GHASH_DIGEST_SIZE,
+ .base = {
+ .cra_name = "rfc4106(gcm(aes))",
+ .cra_driver_name = "crypto-eip-rfc4106-gcm-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_rfc4106_gcm_cra_init,
+ .cra_exit = mtk_crypto_aead_gcm_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ if (authsize != GHASH_DIGEST_SIZE)
+ return -EINVAL;
+
+ return 0;
+}
+
+static int mtk_crypto_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = mtk_crypto_aead_gcm_cra_init(tfm);
+ ctx->hash_alg = MTK_CRYPTO_ALG_GMAC;
+ ctx->mode = MTK_CRYPTO_MODE_GMAC;
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
+ return ret;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_rfc4543_gcm = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_rfc4106_gcm_setkey,
+ .setauthsize = mtk_crypto_rfc4543_gcm_setauthsize,
+ .encrypt = mtk_crypto_rfc4106_encrypt,
+ .decrypt = mtk_crypto_rfc4106_decrypt,
+ .ivsize = GCM_RFC4543_IV_SIZE,
+ .maxauthsize = GHASH_DIGEST_SIZE,
+ .base = {
+ .cra_name = "rfc4543(gcm(aes))",
+ .cra_driver_name = "crypto-eip-rfc4543-gcm-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_rfc4543_gcm_cra_init,
+ .cra_exit = mtk_crypto_aead_gcm_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int mtk_crypto_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
+ unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct crypto_aes_ctx aes;
+ int ret, i;
+
+ ret = aes_expandkey(&aes, key, len);
+ if (ret) {
+ memzero_explicit(&aes, sizeof(aes));
+ return ret;
+ }
+
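+ /* Keep the AES key twice: little-endian words as the cipher key and,
+  * after two reserved AES blocks, big-endian words in ipad for the
+  * CBC-MAC part of CCM.
+  */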
+ for (i = 0; i < len / sizeof(u32); i++) {
+ ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
+ ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
+ cpu_to_be32(aes.key_enc[i]);
+ }
+
+ ctx->key_len = len;
+ ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
+ ctx->hash_alg = MTK_CRYPTO_ALG_CCM;
+
+ memzero_explicit(&aes, sizeof(aes));
+ return 0;
+}
+
+static int mtk_crypto_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
+ unsigned int len)
+{
+ struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
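+ /* The first nonce byte is the CCM size field (L' = 3 for the 4-byte
+  * counter used by RFC 4309); the remaining bytes hold the 3-byte salt
+  * taken from the tail of the key.
+  */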
+ *(u8 *) &ctx->nonce = 3;
+ memcpy((u8 *) &ctx->nonce + 1, key + len -
+ EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
+ EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
+
+ len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
+ return mtk_crypto_aead_ccm_setkey(ctfm, key, len);
+}
+
+static int mtk_crypto_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
+ unsigned int authsize)
+{
+ switch (authsize) {
+ case 8:
+ case 12:
+ case 16:
+ break;
+ default:
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+static int mtk_crypto_rfc4309_ccm_encrypt(struct aead_request *req)
+{
+ struct mtk_crypto_cipher_req *creq = aead_request_ctx(req);
+
+ if (req->assoclen != 16 && req->assoclen != 20)
+ return -EINVAL;
+
+ return mtk_crypto_queue_req(&req->base, creq, MTK_CRYPTO_ENCRYPT);
+}
+
+static int mtk_crypto_rfc4309_ccm_decrypt(struct aead_request *req)
+{
+ struct mtk_crypto_cipher_req *creq = aead_request_ctx(req);
+
+ if (req->assoclen != 16 && req->assoclen != 20)
+ return -EINVAL;
+
+ return mtk_crypto_queue_req(&req->base, creq, MTK_CRYPTO_DECRYPT);
+}
+
+static int mtk_crypto_aead_ccm_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_aead_cra_init(tfm);
+ ctx->hash_alg = MTK_CRYPTO_ALG_XCBC;
+ ctx->state_sz = 3 * AES_BLOCK_SIZE;
+ ctx->mode = MTK_CRYPTO_MODE_CCM;
+
+ return 0;
+}
+
+static int mtk_crypto_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = mtk_crypto_aead_ccm_cra_init(tfm);
+ ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
+ return ret;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_rfc4309_ccm = {
+ .type = MTK_CRYPTO_ALG_TYPE_AEAD,
+ .alg.aead = {
+ .setkey = mtk_crypto_rfc4309_ccm_setkey,
+ .setauthsize = mtk_crypto_rfc4309_ccm_setauthsize,
+ .encrypt = mtk_crypto_rfc4309_ccm_encrypt,
+ .decrypt = mtk_crypto_rfc4309_ccm_decrypt,
+ .ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
+ .maxauthsize = AES_BLOCK_SIZE,
+ .base = {
+ .cra_name = "rfc4309(ccm(aes))",
+ .cra_driver_name = "crypto-eip-rfc4309-ccm-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct mtk_crypto_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = mtk_crypto_rfc4309_ccm_cra_init,
+ .cra_exit = mtk_crypto_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
diff --git a/feed/kernel/crypto-eip/src/lookaside-hash.c b/feed/kernel/crypto-eip/src/lookaside-hash.c
new file mode 100644
index 0000000..f831006
--- /dev/null
+++ b/feed/kernel/crypto-eip/src/lookaside-hash.c
@@ -0,0 +1,1573 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * Copyright (C) 2023 MediaTek Inc.
+ *
+ * Author: Chris.Chou <chris.chou@mediatek.com>
+ * Ren-Ting Wang <ren-ting.wang@mediatek.com>
+ */
+
+#include <linux/bitops.h>
+#include <crypto/aes.h>
+#include <crypto/hmac.h>
+#include <crypto/skcipher.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/internal/hash.h>
+
+#include "crypto-eip/crypto-eip.h"
+#include "crypto-eip/ddk-wrapper.h"
+#include "crypto-eip/lookaside.h"
+#include "crypto-eip/internal.h"
+
+static inline u64 mtk_crypto_queued_len(struct mtk_crypto_ahash_req *req)
+{
+ return req->len - req->processed;
+}
+
+static int mtk_crypto_ahash_enqueue(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_priv *priv = ctx->priv;
+ int ret;
+
+ spin_lock_bh(&priv->mtk_eip_queue.queue_lock);
+ ret = crypto_enqueue_request(&priv->mtk_eip_queue.queue, &areq->base);
+ spin_unlock_bh(&priv->mtk_eip_queue.queue_lock);
+
+ queue_work(priv->mtk_eip_queue.workqueue,
+ &priv->mtk_eip_queue.work_data.work);
+
+ return ret;
+}
+
+static int mtk_crypto_ahash_send(struct crypto_async_request *async)
+{
+ struct ahash_request *areq = ahash_request_cast(async);
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ int cache_len;
+ int extra = 0;
+ u64 queued;
+ u64 len;
+ int ret;
+ uint8_t *cur_req;
+ int i;
+
+ if (req->hmac_zlen)
+ goto zero_length_hmac;
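+ /* cache_len is the part of the pending data that already sits in
+  * req->cache; anything beyond that still lives in areq->src.
+  */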
+ queued = mtk_crypto_queued_len(req);
+ if (queued <= HASH_CACHE_SIZE)
+ cache_len = queued;
+ else
+ cache_len = queued - areq->nbytes;
+
+ if (!req->finish && !req->last_req) {
+ /* If this is not the last request and the queued data does not
+ * fit into full cache blocks, cache it for the next send call.
+ */
+ extra = queued & (HASH_CACHE_SIZE - 1);
+
+ if (!extra)
+ extra = HASH_CACHE_SIZE;
+
+ sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
+ req->cache_next, extra, areq->nbytes - extra);
+
+ queued -= extra;
+
+ if (!queued)
+ return 0;
+
+ extra = 0;
+ }
+
+ len = queued;
+ cur_req = kmalloc(sizeof(uint8_t) * len + AES_BLOCK_SIZE, GFP_KERNEL);
+ if (!cur_req) {
+ CRYPTO_ERR("alloc buffer for ahash request failed\n");
+ ret = -ENOMEM;
+ goto exit;
+ }
+ /* Send request to EIP197 */
+ if (cache_len) {
+ memcpy(cur_req, req->cache, cache_len);
+ queued -= cache_len;
+ }
+ if (queued)
+ sg_copy_to_buffer(areq->src, sg_nents(areq->src), cur_req + cache_len, queued);
+
+ if (unlikely(req->xcbcmac)) {
+ int pad_size = 0;
+ int offset;
+ int new;
+
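+ /* Final XCBC/CMAC block handling: a partial block is padded with 0x80
+  * followed by zeros and XORed with the subkey kept in ipad[4..7],
+  * while a full final block is XORed with the subkey in ipad[0..3].
+  */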
+ if (req->finish) {
+ new = len % AES_BLOCK_SIZE;
+ pad_size = AES_BLOCK_SIZE - new;
+ offset = (len - new) / sizeof(u32);
+
+ if (pad_size != AES_BLOCK_SIZE) {
+ memset(cur_req + len, 0, pad_size);
+ cur_req[len] = 0x80;
+ for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
+ ((__be32 *) cur_req)[offset + i] ^=
+ cpu_to_be32(le32_to_cpu(
+ ctx->ipad[i + 4]));
+ }
+ } else {
+ for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
+ ((__be32 *) cur_req)[offset - 4 + i] ^=
+ cpu_to_be32(le32_to_cpu(
+ ctx->ipad[i]));
+ pad_size = 0;
+ }
+ }
+
+ ret = crypto_ahash_aes_cbc(async, req, cur_req, len + pad_size);
+ kfree(cur_req);
+ if (ret) {
+ if (req->sa_pointer)
+ crypto_free_sa(req->sa_pointer);
+ kfree(req->token_context);
+ CRYPTO_ERR("Fail on ahash_aes_cbc process\n");
+ goto exit;
+ }
+ req->not_first = true;
+ req->processed += len - extra;
+
+ return 0;
+ }
+
+ if (req->not_first)
+ ret = crypto_ahash_token_req(async, req, cur_req, len, req->finish);
+ else
+ ret = crypto_first_ahash_req(async, req, cur_req, len, req->finish);
+
+ kfree(cur_req);
+
+ if (ret) {
+ if (req->sa_pointer)
+ crypto_free_sa(req->sa_pointer);
+ kfree(req->token_context);
+ CRYPTO_ERR("Fail on ahash_req process\n");
+ goto exit;
+ }
+ req->not_first = true;
+ req->processed += len - extra;
+
+ return 0;
+
+zero_length_hmac:
+ if (req->sa_pointer)
+ crypto_free_sa(req->sa_pointer);
+ kfree(req->token_context);
+
+ /* Finalize the HMAC: run the inner digest in req->state through the
+  * outer transform precomputed from the opad.
+  */
+ if (req->hmac) {
+ req->sa_pointer = ctx->opad_sa;
+ req->token_context = ctx->opad_token;
+ ret = crypto_ahash_token_req(async, req, (uint8_t *) req->state,
+ req->digest_sz, true);
+ }
+
+ return 0;
+exit:
+ async->complete(async, ret);
+
+ return 0;
+}
+
+static int mtk_crypto_ahash_handle_result(struct mtk_crypto_result *res, int err)
+{
+ struct crypto_async_request *async = res->async;
+ struct ahash_request *areq = ahash_request_cast(async);
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+ struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
+ int cache_len;
+
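+ /* Copy the engine output into req->state; for XCBC/CMAC only the last
+  * AES block of the result is the MAC value.
+  */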
+ if (req->xcbcmac) {
+ memcpy(req->state, res->dst + res->size - AES_BLOCK_SIZE, AES_BLOCK_SIZE);
+ crypto_free_sa(res->eip.sa);
+ kfree(res->eip.token_context);
+ } else
+ memcpy(req->state, res->dst, req->digest_sz);
+
+ crypto_free_token(res->eip.token);
+ crypto_free_pkt(res->eip.pkt_handle);
+
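+ /* For HMAC the inner digest finishes first; re-enqueue the request once
+  * so the outer (opad) pass runs before the result is copied out.
+  */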
+ if (req->finish) {
+ if (req->hmac && !req->hmac_zlen) {
+ req->hmac_zlen = true;
+ mtk_crypto_ahash_enqueue(areq);
+ return 0;
+ }
+ if (req->sa_pointer)
+ crypto_free_sa(req->sa_pointer);
+
+ kfree(req->token_context);
+
+ memcpy(areq->result, req->state, crypto_ahash_digestsize(ahash));
+ }
+
+ cache_len = mtk_crypto_queued_len(req);
+ if (cache_len)
+ memcpy(req->cache, req->cache_next, cache_len);
+ async->complete(async, 0);
+
+ return 0;
+}
+
+static int mtk_crypto_ahash_cache(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+ u64 cache_len;
+
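+ /* Accumulate input in the request cache while the total still fits in
+  * HASH_CACHE_SIZE; -E2BIG tells update() it is time to send data to
+  * the engine.
+  */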
+ cache_len = mtk_crypto_queued_len(req);
+
+ if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
+ sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
+ req->cache + cache_len,
+ areq->nbytes, 0);
+ return 0;
+ }
+
+ return -E2BIG;
+}
+
+static int mtk_crypto_ahash_update(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+ int ret;
+
+ if (!areq->nbytes)
+ return 0;
+
+ ret = mtk_crypto_ahash_cache(areq);
+
+ req->len += areq->nbytes;
+
+ if ((ret && !req->finish) || req->last_req)
+ return mtk_crypto_ahash_enqueue(areq);
+
+ return 0;
+}
+
+static int mtk_crypto_ahash_final(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+
+ req->finish = true;
+
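+ /* Handle degenerate inputs first: well-known empty-message digests and
+  * empty MACs are produced locally, and a zero-length HMAC switches to
+  * the inner state precomputed at setkey time.
+  */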
+ if (unlikely(!req->len && !areq->nbytes && !req->hmac)) {
+ if (ctx->alg == MTK_CRYPTO_ALG_SHA1)
+ memcpy(areq->result, sha1_zero_message_hash,
+ SHA1_DIGEST_SIZE);
+ else if (ctx->alg == MTK_CRYPTO_ALG_SHA224)
+ memcpy(areq->result, sha224_zero_message_hash,
+ SHA224_DIGEST_SIZE);
+ else if (ctx->alg == MTK_CRYPTO_ALG_SHA256)
+ memcpy(areq->result, sha256_zero_message_hash,
+ SHA256_DIGEST_SIZE);
+ else if (ctx->alg == MTK_CRYPTO_ALG_SHA384)
+ memcpy(areq->result, sha384_zero_message_hash,
+ SHA384_DIGEST_SIZE);
+ else if (ctx->alg == MTK_CRYPTO_ALG_SHA512)
+ memcpy(areq->result, sha512_zero_message_hash,
+ SHA512_DIGEST_SIZE);
+ else if (ctx->alg == MTK_CRYPTO_ALG_MD5)
+ memcpy(areq->result, md5_zero_message_hash,
+ MD5_DIGEST_SIZE);
+
+ return 0;
+ } else if (unlikely(req->digest == MTK_CRYPTO_DIGEST_XCM &&
+ ctx->alg == MTK_CRYPTO_ALG_MD5 && req->len == sizeof(u32) &&
+ !areq->nbytes)) {
+ memcpy(areq->result, ctx->ipad, sizeof(u32));
+ return 0;
+ } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
+ !areq->nbytes)) {
+ memset(areq->result, 0, AES_BLOCK_SIZE);
+ return 0;
+ } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
+ !areq->nbytes)) {
+ int i;
+
+ for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
+ ((__be32 *) areq->result)[i] =
+ cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4]));
+ areq->result[0] ^= 0x80;
+ crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
+ return 0;
+ } else if (unlikely(req->hmac && (req->len == req->block_sz) &&
+ !areq->nbytes)) {
+ memcpy(req->state, ctx->zero_hmac, req->state_sz);
+ req->hmac_zlen = true;
+ } else if (req->hmac) {
+ req->digest = MTK_CRYPTO_DIGEST_HMAC;
+ }
+
+ return mtk_crypto_ahash_enqueue(areq);
+}
+
+static int mtk_crypto_ahash_finup(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ req->finish = true;
+
+ mtk_crypto_ahash_update(areq);
+ return mtk_crypto_ahash_final(areq);
+}
+
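+/* export/import also carry the SA pointer and token context so that a
+ * partially processed hash can resume on the same hardware session.
+ */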
+static int mtk_crypto_ahash_export(struct ahash_request *areq, void *out)
+{
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+ struct mtk_crypto_ahash_export_state *export = out;
+
+ export->len = req->len;
+ export->processed = req->processed;
+
+ export->digest = req->digest;
+ export->sa_pointer = req->sa_pointer;
+ export->token_context = req->token_context;
+
+ memcpy(export->state, req->state, req->state_sz);
+ memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
+
+ return 0;
+}
+
+static int mtk_crypto_ahash_import(struct ahash_request *areq, const void *in)
+{
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+ const struct mtk_crypto_ahash_export_state *export = in;
+ int ret;
+
+ ret = crypto_ahash_init(areq);
+ if (ret)
+ return ret;
+
+ req->len = export->len;
+ req->processed = export->processed;
+
+ req->digest = export->digest;
+ req->sa_pointer = export->sa_pointer;
+ req->token_context = export->token_context;
+ if (req->sa_pointer)
+ req->not_first = true;
+
+ memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
+ memcpy(req->state, export->state, req->state_sz);
+
+ return 0;
+}
+
+static int mtk_crypto_ahash_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct mtk_crypto_alg_template *tmpl =
+ container_of(__crypto_ahash_alg(tfm->__crt_alg),
+ struct mtk_crypto_alg_template, alg.ahash);
+
+ ctx->priv = tmpl->priv;
+ ctx->base.send = mtk_crypto_ahash_send;
+ ctx->base.handle_result = mtk_crypto_ahash_handle_result;
+ crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
+ sizeof(struct mtk_crypto_ahash_req));
+
+ return 0;
+}
+
+static int mtk_crypto_sha1_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA1;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA1_DIGEST_SIZE;
+ req->digest_sz = SHA1_DIGEST_SIZE;
+ req->block_sz = SHA1_BLOCK_SIZE;
+
+ return 0;
+}
+
+static int mtk_crypto_sha1_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_sha1_init(areq);
+
+ if (ret)
+ return ret;
+
+ return mtk_crypto_ahash_finup(areq);
+}
+
+static void mtk_crypto_ahash_cra_exit(struct crypto_tfm *tfm)
+{
+}
+
+struct mtk_crypto_alg_template mtk_crypto_sha1 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_sha1_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_sha1_digest,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA1_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "sha1",
+ .cra_driver_name = "crypto-eip-sha1",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA1_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_hmac_sha1_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
+ req->sa_pointer = ctx->ipad_sa;
+ req->token_context = ctx->ipad_token;
+ req->not_first = true;
+ req->len = SHA1_BLOCK_SIZE;
+ req->processed = SHA1_BLOCK_SIZE;
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA1;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA1_DIGEST_SIZE;
+ req->digest_sz = SHA1_DIGEST_SIZE;
+ req->block_sz = SHA1_BLOCK_SIZE;
+ req->hmac = true;
+
+ return 0;
+}
+
+static int mtk_crypto_hmac_sha1_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_hmac_sha1_init(areq);
+
+ if (ret)
+ return ret;
+
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_ahash_result {
+ struct completion completion;
+ int error;
+};
+
+static void mtk_crypto_ahash_complete(struct crypto_async_request *req, int error)
+{
+ struct mtk_crypto_ahash_result *result = req->data;
+
+ if (error == -EINPROGRESS)
+ return;
+
+ result->error = error;
+ complete(&result->completion);
+}
+
+static int mtk_crypto_hmac_init_pad(struct ahash_request *areq, unsigned int blocksize,
+ const u8 *key, unsigned int keylen,
+ u8 *ipad, u8 *opad)
+{
+ struct mtk_crypto_ahash_result result;
+ struct scatterlist sg;
+ int ret, i;
+ u8 *keydup;
+
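+ /* Standard HMAC key preparation: a key longer than the block size is
+  * hashed down first, then the block-sized key is XORed with the
+  * ipad/opad constants.
+  */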
+ if (keylen <= blocksize) {
+ memcpy(ipad, key, keylen);
+ } else {
+ keydup = kmemdup(key, keylen, GFP_KERNEL);
+ if (!keydup)
+ return -ENOMEM;
+
+ ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
+ mtk_crypto_ahash_complete, &result);
+ sg_init_one(&sg, keydup, keylen);
+ ahash_request_set_crypt(areq, &sg, ipad, keylen);
+ init_completion(&result.completion);
+
+ ret = crypto_ahash_digest(areq);
+ if (ret == -EINPROGRESS || ret == -EBUSY) {
+ wait_for_completion_interruptible(&result.completion);
+ ret = result.error;
+ }
+
+ memzero_explicit(keydup, keylen);
+ kfree(keydup);
+
+ if (ret)
+ return ret;
+
+ keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
+ }
+
+ memset(ipad + keylen, 0, blocksize - keylen);
+ memcpy(opad, ipad, blocksize);
+
+ for (i = 0; i < blocksize; i++) {
+ ipad[i] ^= HMAC_IPAD_VALUE;
+ opad[i] ^= HMAC_OPAD_VALUE;
+ }
+
+ return 0;
+}
+
+static int mtk_crypto_hmac_init_iv(struct ahash_request *areq, unsigned int blocksize,
+ u8 *pad, void *state, bool zero)
+{
+ struct mtk_crypto_ahash_result result;
+ struct mtk_crypto_ahash_req *req;
+ struct scatterlist sg;
+ int ret;
+
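+ /* Feed one padded key block through this driver's own ahash and export
+  * the intermediate state (including its SA and token context) as the
+  * precomputed HMAC inner or outer IV.
+  */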
+ ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
+ mtk_crypto_ahash_complete, &result);
+ sg_init_one(&sg, pad, blocksize);
+ ahash_request_set_crypt(areq, &sg, pad, blocksize);
+ init_completion(&result.completion);
+
+ ret = crypto_ahash_init(areq);
+ if (ret)
+ return ret;
+
+ req = ahash_request_ctx(areq);
+ req->last_req = !zero;
+
+ if (zero)
+ ret = crypto_ahash_finup(areq);
+ else
+ ret = crypto_ahash_update(areq);
+ if (ret && ret != -EINPROGRESS && ret != -EBUSY)
+ return ret;
+
+ wait_for_completion_interruptible(&result.completion);
+ if (result.error)
+ return result.error;
+
+ return crypto_ahash_export(areq, state);
+}
+
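+/* Precompute the digest of the ipad-padded key alone; it becomes the
+ * inner state used when a zero-length message is HMACed.
+ */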
+int mtk_crypto_zero_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
+ void *istate)
+{
+ struct ahash_request *areq;
+ struct crypto_ahash *tfm;
+ unsigned int blocksize;
+ u8 *ipad, *opad;
+ int ret;
+
+ tfm = crypto_alloc_ahash(alg, 0, 0);
+ if (IS_ERR(tfm))
+ return PTR_ERR(tfm);
+
+ areq = ahash_request_alloc(tfm, GFP_KERNEL);
+ if (!areq) {
+ ret = -ENOMEM;
+ goto free_ahash;
+ }
+
+ crypto_ahash_clear_flags(tfm, ~0);
+ blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
+
+ ipad = kcalloc(2, blocksize, GFP_KERNEL);
+ if (!ipad) {
+ ret = -ENOMEM;
+ goto free_request;
+ }
+
+ opad = ipad + blocksize;
+
+ ret = mtk_crypto_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
+ if (ret)
+ goto free_ipad;
+
+ ret = mtk_crypto_hmac_init_iv(areq, blocksize, ipad, istate, true);
+
+free_ipad:
+ kfree(ipad);
+free_request:
+ ahash_request_free(areq);
+free_ahash:
+ crypto_free_ahash(tfm);
+
+ return ret;
+}
+
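+/* Precompute the intermediate hash states after absorbing the ipad and
+ * opad blocks; the exported states also carry the SA/token handles that
+ * the hmac init routines reuse.
+ */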
+int mtk_crypto_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
+ void *istate, void *ostate)
+{
+ struct ahash_request *areq;
+ struct crypto_ahash *tfm;
+ unsigned int blocksize;
+ u8 *ipad, *opad;
+ int ret;
+
+ tfm = crypto_alloc_ahash(alg, 0, 0);
+ if (IS_ERR(tfm))
+ return PTR_ERR(tfm);
+
+ areq = ahash_request_alloc(tfm, GFP_KERNEL);
+ if (!areq) {
+ ret = -ENOMEM;
+ goto free_ahash;
+ }
+
+ crypto_ahash_clear_flags(tfm, ~0);
+ blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
+
+ ipad = kcalloc(2, blocksize, GFP_KERNEL);
+ if (!ipad) {
+ ret = -ENOMEM;
+ goto free_request;
+ }
+
+ opad = ipad + blocksize;
+
+ ret = mtk_crypto_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
+ if (ret)
+ goto free_ipad;
+
+ ret = mtk_crypto_hmac_init_iv(areq, blocksize, ipad, istate, false);
+ if (ret)
+ goto free_ipad;
+
+ ret = mtk_crypto_hmac_init_iv(areq, blocksize, opad, ostate, false);
+
+free_ipad:
+ kfree(ipad);
+free_request:
+ ahash_request_free(areq);
+free_ahash:
+ crypto_free_ahash(tfm);
+
+ return ret;
+}
+
+static int mtk_crypto_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int keylen, const char *alg,
+ unsigned int state_sz)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
+ struct mtk_crypto_ahash_export_state istate, ostate, zeroi;
+ int ret;
+
+ ret = mtk_crypto_hmac_setkey(alg, key, keylen, &istate, &ostate);
+ if (ret)
+ return ret;
+
+ ret = mtk_crypto_zero_hmac_setkey(alg, key, keylen, &zeroi);
+ if (ret)
+ return ret;
+
+ memcpy(ctx->zero_hmac, &zeroi.state, state_sz);
+ memcpy(ctx->ipad, &istate.state, state_sz);
+ memcpy(ctx->opad, &ostate.state, state_sz);
+ ctx->ipad_sa = istate.sa_pointer;
+ ctx->ipad_token = istate.token_context;
+ ctx->opad_sa = ostate.sa_pointer;
+ ctx->opad_token = ostate.token_context;
+
+ return 0;
+}
+
+static int mtk_crypto_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha1",
+ SHA1_DIGEST_SIZE);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha1 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_hmac_sha1_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_hmac_sha1_digest,
+ .setkey = mtk_crypto_hmac_sha1_setkey,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA1_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(sha1)",
+ .cra_driver_name = "crypto-eip-hmac-sha1",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA1_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_sha256_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA256;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA256_DIGEST_SIZE;
+ req->digest_sz = SHA256_DIGEST_SIZE;
+ req->block_sz = SHA256_BLOCK_SIZE;
+
+ return 0;
+}
+
+static int mtk_crypto_sha256_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_sha256_init(areq);
+
+ if (ret)
+ return ret;
+
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_sha256 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_sha256_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_sha256_digest,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA256_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "sha256",
+ .cra_driver_name = "crypto-eip-sha256",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA256_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_sha224_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA224;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA256_DIGEST_SIZE;
+ req->digest_sz = SHA256_DIGEST_SIZE;
+ req->block_sz = SHA256_BLOCK_SIZE;
+
+ return 0;
+}
+
+static int mtk_crypto_sha224_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_sha224_init(areq);
+
+ if (ret)
+ return ret;
+
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_sha224 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_sha224_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_sha224_digest,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA224_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "sha224",
+ .cra_driver_name = "crypto-eip-sha224",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA224_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha224",
+ SHA256_DIGEST_SIZE);
+}
+
+static int mtk_crypto_hmac_sha224_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ memcpy(req->state, ctx->ipad, SHA224_DIGEST_SIZE);
+
+ req->sa_pointer = ctx->ipad_sa;
+ req->token_context = ctx->ipad_token;
+ req->not_first = true;
+ req->len = SHA224_BLOCK_SIZE;
+ req->processed = SHA224_BLOCK_SIZE;
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA224;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA224_DIGEST_SIZE;
+ req->digest_sz = SHA224_DIGEST_SIZE;
+ req->block_sz = SHA224_BLOCK_SIZE;
+ req->hmac = true;
+
+ return 0;
+}
+
+static int mtk_crypto_hmac_sha224_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_hmac_sha224_init(areq);
+
+ if (ret)
+ return ret;
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha224 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_hmac_sha224_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_hmac_sha224_digest,
+ .setkey = mtk_crypto_hmac_sha224_setkey,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA224_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(sha224)",
+ .cra_driver_name = "crypto-eip-hmac-sha224",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA224_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha256",
+ SHA256_DIGEST_SIZE);
+}
+
+static int mtk_crypto_hmac_sha256_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
+ req->sa_pointer = ctx->ipad_sa;
+ req->token_context = ctx->ipad_token;
+ req->not_first = true;
+ req->len = SHA256_BLOCK_SIZE;
+ req->processed = SHA256_BLOCK_SIZE;
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA256;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA256_DIGEST_SIZE;
+ req->digest_sz = SHA256_DIGEST_SIZE;
+ req->block_sz = SHA256_BLOCK_SIZE;
+ req->hmac = true;
+
+ return 0;
+}
+
+static int mtk_crypto_hmac_sha256_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_hmac_sha256_init(areq);
+
+ if (ret)
+ return ret;
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha256 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_hmac_sha256_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_hmac_sha256_digest,
+ .setkey = mtk_crypto_hmac_sha256_setkey,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA256_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(sha256)",
+ .cra_driver_name = "crypto-eip-hmac-sha256",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA256_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_sha512_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA512;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA512_DIGEST_SIZE;
+ req->digest_sz = SHA512_DIGEST_SIZE;
+ req->block_sz = SHA512_BLOCK_SIZE;
+
+ return 0;
+}
+
+static int mtk_crypto_sha512_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_sha512_init(areq);
+
+ if (ret)
+ return ret;
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_sha512 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_sha512_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_sha512_digest,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA512_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "sha512",
+ .cra_driver_name = "crypto-eip-sha512",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA512_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_sha384_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA384;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA512_DIGEST_SIZE;
+ req->digest_sz = SHA512_DIGEST_SIZE;
+ req->block_sz = SHA384_BLOCK_SIZE;
+
+ return 0;
+}
+
+static int mtk_crypto_sha384_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_sha384_init(areq);
+
+ if (ret)
+ return ret;
+
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_sha384 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_sha384_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_sha384_digest,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA384_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "sha384",
+ .cra_driver_name = "crypto-eip-sha384",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA384_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha512",
+ SHA512_DIGEST_SIZE);
+}
+
+static int mtk_crypto_hmac_sha512_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
+ req->sa_pointer = ctx->ipad_sa;
+ req->token_context = ctx->ipad_token;
+ req->not_first = true;
+ req->len = SHA512_BLOCK_SIZE;
+ req->processed = SHA512_BLOCK_SIZE;
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA512;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA512_DIGEST_SIZE;
+ req->digest_sz = SHA512_DIGEST_SIZE;
+ req->block_sz = SHA512_BLOCK_SIZE;
+ req->hmac = true;
+
+ return 0;
+}
+
+static int mtk_crypto_hmac_sha512_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_hmac_sha512_init(areq);
+
+ if (ret)
+ return ret;
+
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha512 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_hmac_sha512_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_hmac_sha512_digest,
+ .setkey = mtk_crypto_hmac_sha512_setkey,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA512_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(sha512)",
+ .cra_driver_name = "crypto-eip-hmac-sha512",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA512_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-sha384",
+ SHA512_DIGEST_SIZE);
+}
+
+static int mtk_crypto_hmac_sha384_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ memcpy(req->state, ctx->ipad, SHA384_DIGEST_SIZE);
+ req->sa_pointer = ctx->ipad_sa;
+ req->token_context = ctx->ipad_token;
+ req->not_first = true;
+ req->len = SHA384_BLOCK_SIZE;
+ req->processed = SHA384_BLOCK_SIZE;
+
+ ctx->alg = MTK_CRYPTO_ALG_SHA384;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = SHA384_DIGEST_SIZE;
+ req->digest_sz = SHA384_DIGEST_SIZE;
+ req->block_sz = SHA384_BLOCK_SIZE;
+ req->hmac = true;
+
+ return 0;
+}
+
+static int mtk_crypto_hmac_sha384_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_hmac_sha384_init(areq);
+
+ if (ret)
+ return ret;
+
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_sha384 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_hmac_sha384_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_hmac_sha384_digest,
+ .setkey = mtk_crypto_hmac_sha384_setkey,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = SHA384_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(sha384)",
+ .cra_driver_name = "crypto-eip-hmac-sha384",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = SHA384_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_md5_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ ctx->alg = MTK_CRYPTO_ALG_MD5;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = MD5_DIGEST_SIZE;
+ req->digest_sz = MD5_DIGEST_SIZE;
+ req->block_sz = MD5_HMAC_BLOCK_SIZE;
+
+ return 0;
+}
+
+static int mtk_crypto_md5_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_md5_init(areq);
+
+ if (ret)
+ return ret;
+
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_md5 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_md5_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_md5_digest,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = MD5_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "md5",
+ .cra_driver_name = "crypto-eip-md5",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_hmac_md5_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+
+ memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
+ req->sa_pointer = ctx->ipad_sa;
+ req->token_context = ctx->ipad_token;
+ req->not_first = true;
+ req->len = MD5_HMAC_BLOCK_SIZE;
+ req->processed = MD5_HMAC_BLOCK_SIZE;
+
+ ctx->alg = MTK_CRYPTO_ALG_MD5;
+ req->digest = MTK_CRYPTO_DIGEST_PRECOMPUTED;
+ req->state_sz = MD5_DIGEST_SIZE;
+ req->digest_sz = MD5_DIGEST_SIZE;
+ req->block_sz = MD5_HMAC_BLOCK_SIZE;
+ req->hmac = true;
+
+ return 0;
+}
+
+static int mtk_crypto_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ return mtk_crypto_hmac_alg_setkey(tfm, key, keylen, "crypto-eip-md5",
+ MD5_DIGEST_SIZE);
+}
+
+static int mtk_crypto_hmac_md5_digest(struct ahash_request *areq)
+{
+ int ret = mtk_crypto_hmac_md5_init(areq);
+
+ if (ret)
+ return ret;
+
+ return mtk_crypto_ahash_finup(areq);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_hmac_md5 = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_hmac_md5_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_hmac_md5_digest,
+ .setkey = mtk_crypto_hmac_md5_setkey,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = MD5_DIGEST_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "hmac(md5)",
+ .cra_driver_name = "crypto-eip-hmac-md5",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_ahash_cra_init,
+ .cra_exit = mtk_crypto_ahash_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_cbcmac_init(struct ahash_request *areq)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
+ struct mtk_crypto_ahash_req *req = ahash_request_ctx(areq);
+
+ memset(req, 0, sizeof(*req));
+ memset(req->state, 0, sizeof(u32) * (SHA512_DIGEST_SIZE / sizeof(u32)));
+
+ req->len = AES_BLOCK_SIZE;
+ req->processed = AES_BLOCK_SIZE;
+
+ req->digest = MTK_CRYPTO_DIGEST_XCM;
+ req->state_sz = ctx->key_sz;
+ req->digest_sz = AES_BLOCK_SIZE;
+ req->block_sz = AES_BLOCK_SIZE;
+ req->xcbcmac = true;
+
+ return 0;
+}
+
+static int mtk_crypto_cbcmac_digest(struct ahash_request *areq)
+{
+ return mtk_crypto_cbcmac_init(areq) ?: mtk_crypto_ahash_finup(areq);
+}
+
+static int mtk_crypto_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int len)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
+ struct crypto_aes_ctx aes;
+ u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
+ int ret, i;
+
+ ret = aes_expandkey(&aes, key, len);
+ if (ret)
+ return ret;
+
+ crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
+ crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) & CRYPTO_TFM_REQ_MASK);
+
+ ret = crypto_cipher_setkey(ctx->kaes, key, len);
+ if (ret)
+ return ret;
+
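+ /* RFC 3566 XCBC subkey derivation: K1 = E(K, 0x01..), K2 = E(K, 0x02..)
+  * and K3 = E(K, 0x03..); they are stored big-endian in ipad as
+  * [K2, K3, K1], and K1 is also loaded as the AES key used for the MAC.
+  */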
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
+ "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
+ "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
+ "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
+
+ for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
+ ctx->ipad[i] =
+ (__force u32)cpu_to_be32(key_tmp[i]);
+
+ crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
+ crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
+ CRYPTO_TFM_REQ_MASK);
+ ret = crypto_cipher_setkey(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
+ AES_MIN_KEY_SIZE);
+
+ if (ret)
+ return ret;
+
+ ctx->alg = MTK_CRYPTO_ALG_XCBC;
+ ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
+ ctx->cbcmac = false;
+
+ memzero_explicit(&aes, sizeof(aes));
+ return 0;
+}
+
+static int mtk_crypto_xcbcmac_cra_init(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ mtk_crypto_ahash_cra_init(tfm);
+ ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
+ return PTR_ERR_OR_ZERO(ctx->kaes);
+}
+
+static void mtk_crypto_xcbcmac_cra_exit(struct crypto_tfm *tfm)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_cipher(ctx->kaes);
+ mtk_crypto_ahash_cra_exit(tfm);
+}
+
+struct mtk_crypto_alg_template mtk_crypto_xcbcmac = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_cbcmac_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_cbcmac_digest,
+ .setkey = mtk_crypto_xcbcmac_setkey,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = AES_BLOCK_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "xcbc(aes)",
+ .cra_driver_name = "crypto-eip-xcbc-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_xcbcmac_cra_init,
+ .cra_exit = mtk_crypto_xcbcmac_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
+static int mtk_crypto_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
+ unsigned int len)
+{
+ struct mtk_crypto_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
+ struct crypto_aes_ctx aes;
+ __be64 consts[4];
+ u64 _const[2];
+ u8 msb_mask, gfmask;
+ int ret, i;
+
+ ret = aes_expandkey(&aes, key, len);
+ if (ret)
+ return ret;
+
+ crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
+ crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
+ CRYPTO_TFM_REQ_MASK);
+ ret = crypto_cipher_setkey(ctx->kaes, key, len);
+ if (ret)
+ return ret;
+
+ /* code below borrowed from crypto/cmac.c */
+ /* encrypt the zero block */
+ memset(consts, 0, AES_BLOCK_SIZE);
+ crypto_cipher_encrypt_one(ctx->kaes, (u8 *) consts, (u8 *) consts);
+
+ gfmask = 0x87;
+ _const[0] = be64_to_cpu(consts[1]);
+ _const[1] = be64_to_cpu(consts[0]);
+
+ /* gf(2^128) multiply zero-ciphertext with u and u^2 */
+ for (i = 0; i < 4; i += 2) {
+ msb_mask = ((s64)_const[1] >> 63) & gfmask;
+ _const[1] = (_const[1] << 1) | (_const[0] >> 63);
+ _const[0] = (_const[0] << 1) ^ msb_mask;
+
+ consts[i + 0] = cpu_to_be64(_const[1]);
+ consts[i + 1] = cpu_to_be64(_const[0]);
+ }
+ /* end of code borrowed from crypto/cmac.c */
+
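+ /* ipad layout for CMAC: the two derived subkeys followed by the raw AES
+  * key; ctx->key_sz records the combined length.
+  */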
+ for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
+ ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *) consts)[i]);
+ memcpy((uint8_t *) ctx->ipad + 2 * AES_BLOCK_SIZE, key, len);
+
+ if (len == AES_KEYSIZE_192) {
+ ctx->alg = MTK_CRYPTO_ALG_CMAC_192;
+ ctx->key_sz = 24 + 2 * AES_BLOCK_SIZE;
+ } else if (len == AES_KEYSIZE_256) {
+ ctx->alg = MTK_CRYPTO_ALG_CMAC_256;
+ ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
+ } else {
+ ctx->alg = MTK_CRYPTO_ALG_CMAC_128;
+ ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
+ }
+ ctx->cbcmac = false;
+
+ memzero_explicit(&aes, sizeof(aes));
+ return 0;
+}
+
+struct mtk_crypto_alg_template mtk_crypto_cmac = {
+ .type = MTK_CRYPTO_ALG_TYPE_AHASH,
+ .alg.ahash = {
+ .init = mtk_crypto_cbcmac_init,
+ .update = mtk_crypto_ahash_update,
+ .final = mtk_crypto_ahash_final,
+ .finup = mtk_crypto_ahash_finup,
+ .digest = mtk_crypto_cbcmac_digest,
+ .setkey = mtk_crypto_cmac_setkey,
+ .export = mtk_crypto_ahash_export,
+ .import = mtk_crypto_ahash_import,
+ .halg = {
+ .digestsize = AES_BLOCK_SIZE,
+ .statesize = sizeof(struct mtk_crypto_ahash_export_state),
+ .base = {
+ .cra_name = "cmac(aes)",
+ .cra_driver_name = "crypto-eip-cmac-aes",
+ .cra_priority = MTK_CRYPTO_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct mtk_crypto_ahash_ctx),
+ .cra_init = mtk_crypto_xcbcmac_cra_init,
+ .cra_exit = mtk_crypto_xcbcmac_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+ },
+};
+
diff --git a/feed/kernel/crypto-eip/src/lookaside.c b/feed/kernel/crypto-eip/src/lookaside.c
new file mode 100644
index 0000000..7eae63d
--- /dev/null
+++ b/feed/kernel/crypto-eip/src/lookaside.c
@@ -0,0 +1,52 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * Copyright (C) 2023 MediaTek Inc.
+ *
+ * Author: Chris.Chou <chris.chou@mediatek.com>
+ * Ren-Ting Wang <ren-ting.wang@mediatek.com>
+ */
+
+#include <linux/bitops.h>
+#include <crypto/aes.h>
+#include <crypto/internal/skcipher.h>
+
+#include "crypto-eip/crypto-eip.h"
+#include "crypto-eip/ddk-wrapper.h"
+#include "crypto-eip/lookaside.h"
+#include "crypto-eip/internal.h"
+
+void mtk_crypto_dequeue(struct mtk_crypto_priv *priv)
+{
+ struct crypto_async_request *req;
+ struct crypto_async_request *backlog;
+ struct mtk_crypto_context *ctx;
+ int ret;
+
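+ /* Drain the shared request queue: hand each request to its context's
+  * send() hook and signal backlogged requests that they are now in
+  * progress.
+  */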
+ while (true) {
+ spin_lock_bh(&priv->mtk_eip_queue.queue_lock);
+ backlog = crypto_get_backlog(&priv->mtk_eip_queue.queue);
+ req = crypto_dequeue_request(&priv->mtk_eip_queue.queue);
+ spin_unlock_bh(&priv->mtk_eip_queue.queue_lock);
+
+ if (!req)
+ goto finalize;
+
+ ctx = crypto_tfm_ctx(req->tfm);
+ ret = ctx->send(req);
+ if (ret)
+ goto finalize;
+
+ if (backlog)
+ backlog->complete(backlog, -EINPROGRESS);
+ }
+
+finalize:
+ return;
+}
+
+void mtk_crypto_dequeue_work(struct work_struct *work)
+{
+ struct mtk_crypto_work_data *data =
+ container_of(work, struct mtk_crypto_work_data, work);
+ mtk_crypto_dequeue(data->priv);
+}