blob: f4da571fb4900bbe90c86281aa427dcf9f97799e [file] [log] [blame]
Nicolas Toromanoffda968b12020-09-18 09:19:11 +02001/*
2 * Copyright (c) 2022, STMicroelectronics - All Rights Reserved
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
#include <assert.h>
#include <endian.h>
#include <errno.h>
#include <stdint.h>
#include <string.h>

#include <drivers/clk.h>
#include <drivers/delay_timer.h>
#include <drivers/st/stm32_saes.h>
#include <drivers/st/stm32mp_reset.h>
#include <lib/mmio.h>
#include <lib/utils_def.h>
#include <libfdt.h>

#include <platform_def.h>
20
#define UINT8_BIT			8U
#define AES_BLOCK_SIZE_BIT		128U
/* AES block size in bytes (16) */
#define AES_BLOCK_SIZE			(AES_BLOCK_SIZE_BIT / UINT8_BIT)

/* Supported key sizes, in bytes */
#define AES_KEYSIZE_128			16U
#define AES_KEYSIZE_256			32U
/* Initialization vector size, in bytes */
#define AES_IVSIZE			16U

/* SAES control register */
#define _SAES_CR			0x0U
/* SAES status register */
#define _SAES_SR			0x04U
/* SAES data input register */
#define _SAES_DINR			0x08U
/* SAES data output register */
#define _SAES_DOUTR			0x0CU
/* SAES key registers [0-3] */
#define _SAES_KEYR0			0x10U
#define _SAES_KEYR1			0x14U
#define _SAES_KEYR2			0x18U
#define _SAES_KEYR3			0x1CU
/* SAES initialization vector registers [0-3] */
#define _SAES_IVR0			0x20U
#define _SAES_IVR1			0x24U
#define _SAES_IVR2			0x28U
#define _SAES_IVR3			0x2CU
/* SAES key registers [4-7], used for 256-bit keys only */
#define _SAES_KEYR4			0x30U
#define _SAES_KEYR5			0x34U
#define _SAES_KEYR6			0x38U
#define _SAES_KEYR7			0x3CU
/* SAES suspend registers [0-7] */
#define _SAES_SUSPR0			0x40U
#define _SAES_SUSPR1			0x44U
#define _SAES_SUSPR2			0x48U
#define _SAES_SUSPR3			0x4CU
#define _SAES_SUSPR4			0x50U
#define _SAES_SUSPR5			0x54U
#define _SAES_SUSPR6			0x58U
#define _SAES_SUSPR7			0x5CU
/* SAES Interrupt Enable Register */
#define _SAES_IER			0x300U
/* SAES Interrupt Status Register */
#define _SAES_ISR			0x304U
/* SAES Interrupt Clear Register */
#define _SAES_ICR			0x308U

/* SAES control register fields */
#define _SAES_CR_RESET_VALUE		0x0U
#define _SAES_CR_IPRST			BIT(31)
#define _SAES_CR_KEYSEL_MASK		GENMASK(30, 28)
#define _SAES_CR_KEYSEL_SHIFT		28U
#define _SAES_CR_KEYSEL_SOFT		0x0U
#define _SAES_CR_KEYSEL_DHUK		0x1U
#define _SAES_CR_KEYSEL_BHK		0x2U
#define _SAES_CR_KEYSEL_BHU_XOR_BH_K	0x4U
#define _SAES_CR_KEYSEL_TEST		0x7U
#define _SAES_CR_KSHAREID_MASK		GENMASK(27, 26)
#define _SAES_CR_KSHAREID_SHIFT		26U
#define _SAES_CR_KSHAREID_CRYP		0x0U
#define _SAES_CR_KEYMOD_MASK		GENMASK(25, 24)
#define _SAES_CR_KEYMOD_SHIFT		24U
#define _SAES_CR_KEYMOD_NORMAL		0x0U
#define _SAES_CR_KEYMOD_WRAPPED		0x1U
#define _SAES_CR_KEYMOD_SHARED		0x2U
#define _SAES_CR_NPBLB_MASK		GENMASK(23, 20)
#define _SAES_CR_NPBLB_SHIFT		20U
#define _SAES_CR_KEYPROT		BIT(19)
#define _SAES_CR_KEYSIZE		BIT(18)
#define _SAES_CR_GCMPH_MASK		GENMASK(14, 13)
#define _SAES_CR_GCMPH_SHIFT		13U
#define _SAES_CR_GCMPH_INIT		0U
#define _SAES_CR_GCMPH_HEADER		1U
#define _SAES_CR_GCMPH_PAYLOAD		2U
#define _SAES_CR_GCMPH_FINAL		3U
#define _SAES_CR_DMAOUTEN		BIT(12)
#define _SAES_CR_DMAINEN		BIT(11)
/* The chaining mode field is split in the register: bit 16 plus bits [6:5] */
#define _SAES_CR_CHMOD_MASK		(BIT(16) | GENMASK(6, 5))
#define _SAES_CR_CHMOD_SHIFT		5U
#define _SAES_CR_CHMOD_ECB		0x0U
#define _SAES_CR_CHMOD_CBC		0x1U
#define _SAES_CR_CHMOD_CTR		0x2U
#define _SAES_CR_CHMOD_GCM		0x3U
#define _SAES_CR_CHMOD_GMAC		0x3U
/* 0x800 << _SAES_CR_CHMOD_SHIFT sets bit 16, the upper part of the split field */
#define _SAES_CR_CHMOD_CCM		0x800U
#define _SAES_CR_MODE_MASK		GENMASK(4, 3)
#define _SAES_CR_MODE_SHIFT		3U
#define _SAES_CR_MODE_ENC		0U
#define _SAES_CR_MODE_KEYPREP		1U
#define _SAES_CR_MODE_DEC		2U
#define _SAES_CR_DATATYPE_MASK		GENMASK(2, 1)
#define _SAES_CR_DATATYPE_SHIFT		1U
#define _SAES_CR_DATATYPE_NONE		0U
#define _SAES_CR_DATATYPE_HALF_WORD	1U
#define _SAES_CR_DATATYPE_BYTE		2U
#define _SAES_CR_DATATYPE_BIT		3U
#define _SAES_CR_EN			BIT(0)

/* SAES status register fields */
#define _SAES_SR_KEYVALID		BIT(7)
#define _SAES_SR_BUSY			BIT(3)
#define _SAES_SR_WRERR			BIT(2)
#define _SAES_SR_RDERR			BIT(1)
#define _SAES_SR_CCF			BIT(0)

/* SAES interrupt registers fields */
#define _SAES_I_RNG_ERR			BIT(3)
#define _SAES_I_KEY_ERR			BIT(2)
#define _SAES_I_RW_ERR			BIT(1)
#define _SAES_I_CC			BIT(0)

#define SAES_TIMEOUT_US			100000U
#define TIMEOUT_US_1MS			1000U
/* Delay, in microseconds, observed around IP reset toggling */
#define SAES_RESET_DELAY		20U

/* True when the chaining mode field in (cr) selects chaining mode (mod) */
#define IS_CHAINING_MODE(mod, cr) \
	(((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mod << _SAES_CR_CHMOD_SHIFT))

/* Program chaining mode (mod) into the 32-bit word at address (cr) */
#define SET_CHAINING_MODE(mod, cr) \
	mmio_clrsetbits_32((cr), _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mod << _SAES_CR_CHMOD_SHIFT)
141
Nicolas Toromanoffda968b12020-09-18 09:19:11 +0200142static struct stm32_saes_platdata saes_pdata;
143
/*
 * Retrieve the SAES instance description (base address, clock identifier,
 * reset identifier) from the device tree.
 *
 * Return 0 on success, a negative FDT error value if the node is absent,
 * disabled or incompletely described.
 */
static int stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata)
{
	int node;
	struct dt_node_info info;
	void *fdt;

	if (fdt_get_address(&fdt) == 0) {
		return -FDT_ERR_NOTFOUND;
	}

	node = dt_get_node(&info, -1, DT_SAES_COMPAT);
	if (node < 0) {
		ERROR("No SAES entry in DT\n");
		return -FDT_ERR_NOTFOUND;
	}

	if (info.status == DT_DISABLED) {
		return -FDT_ERR_NOTFOUND;
	}

	/* Base, clock and reset are all mandatory for this driver */
	if ((info.base == 0U) || (info.clock < 0) || (info.reset < 0)) {
		return -FDT_ERR_BADVALUE;
	}

	pdata->base = (uintptr_t)info.base;
	pdata->clock_id = (unsigned long)info.clock;
	pdata->reset_id = (unsigned int)info.reset;

	return 0;
}
174
175static bool does_chaining_mode_need_iv(uint32_t cr)
176{
177 return !(IS_CHAINING_MODE(ECB, cr));
178}
179
180static bool is_encrypt(uint32_t cr)
181{
182 return (cr & _SAES_CR_MODE_MASK) == (_SAES_CR_MODE_ENC << _SAES_CR_MODE_SHIFT);
183}
184
185static bool is_decrypt(uint32_t cr)
186{
187 return (cr & _SAES_CR_MODE_MASK) == (_SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
188}
189
190static int wait_computation_completed(uintptr_t base)
191{
192 uint64_t timeout = timeout_init_us(SAES_TIMEOUT_US);
193
194 while ((mmio_read_32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) {
195 if (timeout_elapsed(timeout)) {
196 WARN("%s: timeout\n", __func__);
197 return -ETIMEDOUT;
198 }
199 }
200
201 return 0;
202}
203
/* Acknowledge the computation complete flag through the interrupt clear register */
static void clear_computation_completed(uintptr_t base)
{
	mmio_setbits_32(base + _SAES_ICR, _SAES_I_CC);
}
208
209static int saes_start(struct stm32_saes_context *ctx)
210{
211 uint64_t timeout;
212
213 /* Reset IP */
214 mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
215 udelay(SAES_RESET_DELAY);
216 mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
217
218 timeout = timeout_init_us(SAES_TIMEOUT_US);
219 while ((mmio_read_32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) == _SAES_SR_BUSY) {
220 if (timeout_elapsed(timeout)) {
221 WARN("%s: timeout\n", __func__);
222 return -ETIMEDOUT;
223 }
224 }
225
226 return 0;
227}
228
/*
 * Terminate an SAES processing sequence.
 *
 * When the sequence failed (prev_error != 0), pulse the IP reset to scrub
 * any internal state before disabling the peripheral.
 */
static void saes_end(struct stm32_saes_context *ctx, int prev_error)
{
	if (prev_error != 0) {
		/* Reset IP */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
		udelay(SAES_RESET_DELAY);
		mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	}

	/* Disable the SAES peripheral */
	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
}
241
242static void saes_write_iv(struct stm32_saes_context *ctx)
243{
244 /* If chaining mode need to restore IV */
245 if (does_chaining_mode_need_iv(ctx->cr)) {
246 uint8_t i;
247
248 /* Restore the _SAES_IVRx */
249 for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
250 mmio_write_32(ctx->base + _SAES_IVR0 + i * sizeof(uint32_t), ctx->iv[i]);
251 }
252 }
253
254}
255
256static void saes_write_key(struct stm32_saes_context *ctx)
257{
258 /* Restore the _SAES_KEYRx if SOFTWARE key */
259 if ((ctx->cr & _SAES_CR_KEYSEL_MASK) == (_SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT)) {
260 uint8_t i;
261
262 for (i = 0U; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++) {
263 mmio_write_32(ctx->base + _SAES_KEYR0 + i * sizeof(uint32_t), ctx->key[i]);
264 }
265
266 if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) {
267 for (i = 0U; i < (AES_KEYSIZE_256 / 2U) / sizeof(uint32_t); i++) {
268 mmio_write_32(ctx->base + _SAES_KEYR4 + i * sizeof(uint32_t),
269 ctx->key[i + 4U]);
270 }
271 }
272 }
273}
274
/*
 * Load the key into the peripheral and, when required, derive the decryption
 * key schedule.
 *
 * For ECB/CBC decryption the hardware needs a "key preparation" pass
 * (mode 2) that runs once with the peripheral enabled, before switching to
 * decryption (mode 3). Other chaining modes use the key as written.
 *
 * Return 0 on success, negative value on key preparation timeout.
 */
static int saes_prepare_key(struct stm32_saes_context *ctx)
{
	/* Disable the SAES peripheral */
	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

	/* Set key size */
	if ((ctx->cr & _SAES_CR_KEYSIZE) != 0U) {
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	} else {
		mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	}

	saes_write_key(ctx);

	/* For ECB/CBC decryption, key preparation mode must be selected to populate the key */
	if ((IS_CHAINING_MODE(ECB, ctx->cr) || IS_CHAINING_MODE(CBC, ctx->cr)) &&
	    is_decrypt(ctx->cr)) {
		int ret;

		/* Select Mode 2 (key preparation) */
		mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_KEYPREP << _SAES_CR_MODE_SHIFT);

		/* Enable SAES: the key schedule derivation starts immediately */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

		/* Wait Computation completed */
		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			return ret;
		}

		clear_computation_completed(ctx->base);

		/* Set Mode 3 (decryption) for the actual processing */
		mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
	}

	return 0;
}
316
/*
 * Suspend an on-going message processing: save the control register and,
 * for IV-based chaining modes, the running IV into @ctx, then disable the
 * peripheral. Processing is resumed later with restore_context().
 *
 * Return 0 on success, -EINVAL if a computation result is still pending
 * (CCF set).
 */
static int save_context(struct stm32_saes_context *ctx)
{
	if ((mmio_read_32(ctx->base + _SAES_SR) & _SAES_SR_CCF) != 0U) {
		/* Device should not be in a processing phase */
		return -EINVAL;
	}

	/* Save CR */
	ctx->cr = mmio_read_32(ctx->base + _SAES_CR);

	/* If chaining mode needs an IV, save the current (running) one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		uint8_t i;

		/* Save IV */
		for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			ctx->iv[i] = mmio_read_32(ctx->base + _SAES_IVR0 + i * sizeof(uint32_t));
		}
	}

	/* Disable the SAES peripheral */
	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return 0;
}
342
/*
 * Resume the processing of a message previously suspended by save_context():
 * reset the IP internal state, restore the saved control register, reload
 * the (possibly re-derived) key and the IV, then re-enable the peripheral.
 *
 * Return 0 on success, negative value on error.
 */
static int restore_context(struct stm32_saes_context *ctx)
{
	int ret;

	/* IP should be disabled */
	if ((mmio_read_32(ctx->base + _SAES_CR) & _SAES_CR_EN) != 0U) {
		VERBOSE("%s: Device is still enabled\n", __func__);
		return -EINVAL;
	}

	/* Reset internal state */
	mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	/*
	 * Restore the _SAES_CR. Note this full write also clears IPRST,
	 * since the saved ctx->cr was captured with IPRST deasserted.
	 */
	mmio_write_32(ctx->base + _SAES_CR, ctx->cr);

	/* Reload the key and derive the decrypt key schedule if needed */
	ret = saes_prepare_key(ctx);
	if (ret != 0) {
		return ret;
	}

	saes_write_iv(ctx);

	/* Enable the SAES peripheral */
	mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return 0;
}
373
/**
 * @brief Initialize SAES driver.
 *
 * Parse the device tree, enable the peripheral clock and cycle the reset
 * line. Panics if the reset line cannot be toggled.
 *
 * @retval 0 if OK; negative value else.
 */
int stm32_saes_driver_init(void)
{
	int err;

	err = stm32_saes_parse_fdt(&saes_pdata);
	if (err != 0) {
		return err;
	}

	clk_enable(saes_pdata.clock_id);
	if (stm32mp_reset_assert(saes_pdata.reset_id, TIMEOUT_US_1MS) != 0) {
		panic();
	}

	udelay(SAES_RESET_DELAY);
	if (stm32mp_reset_deassert(saes_pdata.reset_id, TIMEOUT_US_1MS) != 0) {
		panic();
	}

	return 0;
}
400
401/**
402 * @brief Start a AES computation.
403 * @param ctx: SAES process context
404 * @param is_dec: true if decryption, false if encryption
405 * @param ch_mode: define the chaining mode
406 * @param key_select: define where the key comes from.
407 * @param key: pointer to key (if key_select is KEY_SOFT, else unused)
408 * @param key_size: key size
409 * @param iv: pointer to initialization vectore (unsed if ch_mode is ECB)
410 * @param iv_size: iv size
411 * @note this function doesn't access to hardware but store in ctx the values
412 *
413 * @retval 0 if OK; negative value else.
414 */
415int stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec,
416 enum stm32_saes_chaining_mode ch_mode, enum stm32_saes_key_selection key_select,
417 const void *key, size_t key_size, const void *iv, size_t iv_size)
418{
419 unsigned int i;
420 const uint32_t *iv_u32;
421 const uint32_t *key_u32;
422
423 ctx->assoc_len = 0U;
424 ctx->load_len = 0U;
425
426 ctx->base = saes_pdata.base;
427 ctx->cr = _SAES_CR_RESET_VALUE;
428
429 /* We want buffer to be u32 aligned */
430 assert((uintptr_t)key % __alignof__(uint32_t) == 0);
431 assert((uintptr_t)iv % __alignof__(uint32_t) == 0);
432
433 iv_u32 = iv;
434 key_u32 = key;
435
436 if (is_dec) {
437 /* Save Mode 3 = decrypt */
438 mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_MODE_MASK,
439 _SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
440 } else {
441 /* Save Mode 1 = crypt */
442 mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_MODE_MASK,
443 _SAES_CR_MODE_ENC << _SAES_CR_MODE_SHIFT);
444 }
445
446 /* Save chaining mode */
447 switch (ch_mode) {
448 case STM32_SAES_MODE_ECB:
449 SET_CHAINING_MODE(ECB, (uintptr_t)&(ctx->cr));
450 break;
451 case STM32_SAES_MODE_CBC:
452 SET_CHAINING_MODE(CBC, (uintptr_t)&(ctx->cr));
453 break;
454 case STM32_SAES_MODE_CTR:
455 SET_CHAINING_MODE(CTR, (uintptr_t)&(ctx->cr));
456 break;
457 case STM32_SAES_MODE_GCM:
458 SET_CHAINING_MODE(GCM, (uintptr_t)&(ctx->cr));
459 break;
460 case STM32_SAES_MODE_CCM:
461 SET_CHAINING_MODE(CCM, (uintptr_t)&(ctx->cr));
462 break;
463 default:
464 return -EINVAL;
465 }
466
467 /* We will use HW Byte swap (_SAES_CR_DATATYPE_BYTE) for data.
468 * so we won't need to
469 * htobe32(data) before write to DINR
470 * nor
471 * be32toh after reading from DOUTR
472 *
473 * But note that wrap key only accept _SAES_CR_DATATYPE_NONE
474 */
475 mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_DATATYPE_MASK,
476 _SAES_CR_DATATYPE_BYTE << _SAES_CR_DATATYPE_SHIFT);
477
478 /* Configure keysize */
479 switch (key_size) {
480 case AES_KEYSIZE_128:
481 mmio_clrbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSIZE);
482 break;
483 case AES_KEYSIZE_256:
484 mmio_setbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSIZE);
485 break;
486 default:
487 return -EINVAL;
488 }
489
490 /* Configure key */
491 switch (key_select) {
492 case STM32_SAES_KEY_SOFT:
493 mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
494 _SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT);
495 /* Save key */
496 switch (key_size) {
497 case AES_KEYSIZE_128:
498 /* First 16 bytes == 4 u32 */
499 for (i = 0U; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++) {
500 mmio_write_32((uintptr_t)(ctx->key + i), htobe32(key_u32[3 - i]));
501 /* /!\ we save the key in HW byte order
502 * and word order : key[i] is for _SAES_KEYRi
503 */
504 }
505 break;
506 case AES_KEYSIZE_256:
507 for (i = 0U; i < AES_KEYSIZE_256 / sizeof(uint32_t); i++) {
508 mmio_write_32((uintptr_t)(ctx->key + i), htobe32(key_u32[7 - i]));
509 /* /!\ we save the key in HW byte order
510 * and word order : key[i] is for _SAES_KEYRi
511 */
512 }
513 break;
514 default:
515 return -EINVAL;
516 }
517
518 break;
519 case STM32_SAES_KEY_DHU:
520 mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
521 _SAES_CR_KEYSEL_DHUK << _SAES_CR_KEYSEL_SHIFT);
522 break;
523 case STM32_SAES_KEY_BH:
524 mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
525 _SAES_CR_KEYSEL_BHK << _SAES_CR_KEYSEL_SHIFT);
526 break;
527 case STM32_SAES_KEY_BHU_XOR_BH:
528 mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
529 _SAES_CR_KEYSEL_BHU_XOR_BH_K << _SAES_CR_KEYSEL_SHIFT);
530 break;
531 case STM32_SAES_KEY_WRAPPED:
532 mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
533 _SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT);
534 break;
535
536 default:
537 return -EINVAL;
538 }
539
540 /* Save IV */
541 if (ch_mode != STM32_SAES_MODE_ECB) {
542 if ((iv == NULL) || (iv_size != AES_IVSIZE)) {
543 return -EINVAL;
544 }
545
546 for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
547 mmio_write_32((uintptr_t)(ctx->iv + i), htobe32(iv_u32[3 - i]));
548 /* /!\ We save the iv in HW byte order */
549 }
550 }
551
552 return saes_start(ctx);
553}
554
555/**
556 * @brief Update (or start) a AES authentificate process of associated data (CCM or GCM).
557 * @param ctx: SAES process context
558 * @param last_block: true if last assoc data block
559 * @param data: pointer to associated data
560 * @param data_size: data size
561 *
562 * @retval 0 if OK; negative value else.
563 */
564int stm32_saes_update_assodata(struct stm32_saes_context *ctx, bool last_block,
565 uint8_t *data, size_t data_size)
566{
567 int ret;
568 uint32_t *data_u32;
569 unsigned int i = 0U;
570
571 /* We want buffers to be u32 aligned */
572 assert((uintptr_t)data % __alignof__(uint32_t) == 0);
573 data_u32 = (uint32_t *)data;
574
575 /* Init phase */
576 ret = restore_context(ctx);
577 if (ret != 0) {
578 goto out;
579 }
580
581 ret = wait_computation_completed(ctx->base);
582 if (ret != 0) {
583 return ret;
584 }
585
586 clear_computation_completed(ctx->base);
587
588 if ((data == NULL) || (data_size == 0U)) {
589 /* No associated data */
590 /* ret already = 0 */
591 goto out;
592 }
593
594 /* There is an header/associated data phase */
595 mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
596 _SAES_CR_GCMPH_HEADER << _SAES_CR_GCMPH_SHIFT);
597
598 /* Enable the SAES peripheral */
599 mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
600
601 while (i < round_down(data_size, AES_BLOCK_SIZE)) {
602 unsigned int w; /* Word index */
603
604 w = i / sizeof(uint32_t);
605 /* No need to htobe() as we configure the HW to swap bytes */
606 mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 0U]);
607 mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 1U]);
608 mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 2U]);
609 mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 3U]);
610
611 ret = wait_computation_completed(ctx->base);
612 if (ret != 0) {
613 goto out;
614 }
615
616 clear_computation_completed(ctx->base);
617
618 /* Process next block */
619 i += AES_BLOCK_SIZE;
620 ctx->assoc_len += AES_BLOCK_SIZE_BIT;
621 }
622
623 /* Manage last block if not a block size multiple */
624 if ((last_block) && (i < data_size)) {
625 /* We don't manage unaligned last block yet */
626 ret = -ENODEV;
627 goto out;
628 }
629
630out:
631 if (ret != 0) {
632 saes_end(ctx, ret);
633 }
634
635 return ret;
636}
637
/**
 * @brief Update (or start) an AES authenticate and de/encrypt with payload data (CCM or GCM).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload (may be NULL when there is none)
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size; non-final calls are expected to pass a
 *                   multiple of 16 bytes (not checked here)
 *
 * @retval 0 if OK; negative value else.
 */
int stm32_saes_update_load(struct stm32_saes_context *ctx, bool last_block,
			   uint8_t *data_in, uint8_t *data_out, size_t data_size)
{
	int ret = 0;
	uint32_t *data_in_u32;
	uint32_t *data_out_u32;
	unsigned int i = 0U;
	uint32_t prev_cr;

	/* We want buffers to be u32 aligned */
	assert((uintptr_t)data_in % __alignof__(uint32_t) == 0);
	assert((uintptr_t)data_out % __alignof__(uint32_t) == 0);
	data_in_u32 = (uint32_t *)data_in;
	data_out_u32 = (uint32_t *)data_out;

	/* Sample the current phase before switching to payload phase */
	prev_cr = mmio_read_32(ctx->base + _SAES_CR);

	if ((data_in == NULL) || (data_size == 0U)) {
		/* there is no data */
		goto out;
	}

	/* There is a load phase */
	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			   _SAES_CR_GCMPH_PAYLOAD << _SAES_CR_GCMPH_SHIFT);

	if ((prev_cr & _SAES_CR_GCMPH_MASK) ==
	    (_SAES_CR_GCMPH_INIT << _SAES_CR_GCMPH_SHIFT)) {
		/* Still in initialization phase, no header
		 * We need to enable the SAES peripheral
		 */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
	}

	/* Process the payload one 128-bit block (4 words) at a time */
	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
		unsigned int w; /* Word index */

		w = i / sizeof(uint32_t);
		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 0U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 1U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 2U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		data_out_u32[w + 0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->load_len += AES_BLOCK_SIZE_BIT;
	}
	/* Manage last block if not a block size multiple:
	 * zero-pad the remainder into a full block, process it, and copy back
	 * only the meaningful (data_size - i) output bytes.
	 */
	if ((last_block) && (i < data_size)) {
		uint32_t block_in[AES_BLOCK_SIZE / sizeof(uint32_t)] = {0};
		uint32_t block_out[AES_BLOCK_SIZE / sizeof(uint32_t)] = {0};

		memcpy(block_in, data_in + i, data_size - i);

		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, block_in[0U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[1U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[2U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			VERBOSE("%s %d\n", __func__, __LINE__);
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		block_out[0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		memcpy(data_out + i, block_out, data_size - i);

		/* Account only the real payload bits in the GCM length */
		ctx->load_len += (data_size - i) * UINT8_BIT;
	}

out:
	if (ret != 0) {
		saes_end(ctx, ret);
	}

	return ret;
}
748
/**
 * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
 * @param ctx: SAES process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag buffer size; at most 16 bytes of tag are written
 *
 * @retval 0 if OK; negative value else.
 */
int stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag,
		     size_t tag_size)
{
	int ret;
	uint32_t tag_u32[4];
	uint32_t prev_cr;

	/* Sample the current phase before switching to final phase */
	prev_cr = mmio_read_32(ctx->base + _SAES_CR);

	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			   _SAES_CR_GCMPH_FINAL << _SAES_CR_GCMPH_SHIFT);

	if ((prev_cr & _SAES_CR_GCMPH_MASK) == (_SAES_CR_GCMPH_INIT << _SAES_CR_GCMPH_SHIFT)) {
		/* Still in initialization phase, no header
		 * We need to enable the SAES peripheral
		 */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
	}

	/* Push the lengths block: 64-bit bit-counts of associated data and payload.
	 * assoc_len/load_len are counted in bits and fit in the low words here;
	 * the high words are written as 0.
	 */
	/* No need to htobe() as we configure the HW to swap bytes */
	mmio_write_32(ctx->base + _SAES_DINR, 0);
	mmio_write_32(ctx->base + _SAES_DINR, ctx->assoc_len);
	mmio_write_32(ctx->base + _SAES_DINR, 0);
	mmio_write_32(ctx->base + _SAES_DINR, ctx->load_len);

	ret = wait_computation_completed(ctx->base);
	if (ret != 0) {
		goto out;
	}

	/* No need to htobe() as we configure the HW to swap bytes */
	tag_u32[0] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[1] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[2] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[3] = mmio_read_32(ctx->base + _SAES_DOUTR);

	clear_computation_completed(ctx->base);

	/* Truncate the tag to the caller's buffer size */
	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	/* End of message: always disable the peripheral (reset it on error) */
	saes_end(ctx, ret);

	return ret;
}
802
/**
 * @brief Update (or start) an AES de/encrypt process (ECB, CBC or CTR).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size; must be a multiple of 16 bytes unless
 *                   last_block is true
 *
 * @retval 0 if OK; negative value else.
 */
int stm32_saes_update(struct stm32_saes_context *ctx, bool last_block,
		      uint8_t *data_in, uint8_t *data_out, size_t data_size)
{
	int ret;
	uint32_t *data_in_u32;
	uint32_t *data_out_u32;
	unsigned int i = 0U;

	/* We want buffers to be u32 aligned */
	assert((uintptr_t)data_in % __alignof__(uint32_t) == 0);
	assert((uintptr_t)data_out % __alignof__(uint32_t) == 0);
	data_in_u32 = (uint32_t *)data_in;
	data_out_u32 = (uint32_t *)data_out;

	/* Intermediate chunks must be block aligned to allow suspend/resume */
	if ((!last_block) &&
	    (round_down(data_size, AES_BLOCK_SIZE) != data_size)) {
		ERROR("%s: non last block must be multiple of 128 bits\n",
		      __func__);
		ret = -EINVAL;
		goto out;
	}

	/* In CBC encryption we need to manage specifically last 2 128bits
	 * blocks if total size in not a block size aligned
	 * work TODO. Currently return ENODEV.
	 * Morevoer as we need to know last 2 block, if unaligned and
	 * call with less than two block, return -EINVAL.
	 */
	if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) && is_encrypt(ctx->cr) &&
	    (round_down(data_size, AES_BLOCK_SIZE) != data_size)) {
		if (data_size < AES_BLOCK_SIZE * 2U) {
			ERROR("if CBC, last part size should be at least 2 * AES_BLOCK_SIZE\n");
			ret = -EINVAL;
			goto out;
		}
		/* Moreover the CBC specific padding for encrypt is not yet implemented */
		ret = -ENODEV;
		goto out;
	}

	/* Reload the saved context (key, IV, mode) and enable the peripheral */
	ret = restore_context(ctx);
	if (ret != 0) {
		goto out;
	}

	/* Process the payload one 128-bit block (4 words) at a time */
	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
		unsigned int w; /* Word index */

		w = i / sizeof(uint32_t);
		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 0U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 1U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 2U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		data_out_u32[w + 0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
	}
	/* Manage last block if not a block size multiple */

	if ((last_block) && (i < data_size)) {
		/* In and out buffer have same size so should be AES_BLOCK_SIZE multiple */
		ret = -ENODEV;
		goto out;
	}

	/* Suspend the context so a later call can continue the message */
	if (!last_block) {
		ret = save_context(ctx);
	}

out:
	/* If last block or error, end of SAES process */
	if (last_block || (ret != 0)) {
		saes_end(ctx, ret);
	}

	return ret;
}